github.com/hikaru7719/go@v0.0.0-20181025140707-c8b2ac68906a/src/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 import "cmd/internal/obj" 8 import "cmd/internal/objabi" 9 import "cmd/compile/internal/types" 10 11 var _ = math.MinInt8 // in case not otherwise used 12 var _ = obj.ANOP // in case not otherwise used 13 var _ = objabi.GOROOT // in case not otherwise used 14 var _ = types.TypeMem // in case not otherwise used 15 16 func rewriteValueARM64(v *Value) bool { 17 switch v.Op { 18 case OpARM64ADD: 19 return rewriteValueARM64_OpARM64ADD_0(v) || rewriteValueARM64_OpARM64ADD_10(v) || rewriteValueARM64_OpARM64ADD_20(v) 20 case OpARM64ADDconst: 21 return rewriteValueARM64_OpARM64ADDconst_0(v) 22 case OpARM64ADDshiftLL: 23 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 24 case OpARM64ADDshiftRA: 25 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 26 case OpARM64ADDshiftRL: 27 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 28 case OpARM64AND: 29 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 30 case OpARM64ANDconst: 31 return rewriteValueARM64_OpARM64ANDconst_0(v) 32 case OpARM64ANDshiftLL: 33 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 34 case OpARM64ANDshiftRA: 35 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 36 case OpARM64ANDshiftRL: 37 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 38 case OpARM64BIC: 39 return rewriteValueARM64_OpARM64BIC_0(v) 40 case OpARM64BICshiftLL: 41 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 42 case OpARM64BICshiftRA: 43 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 44 case OpARM64BICshiftRL: 45 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 46 case OpARM64CMN: 47 return rewriteValueARM64_OpARM64CMN_0(v) 48 case OpARM64CMNW: 49 return rewriteValueARM64_OpARM64CMNW_0(v) 50 case OpARM64CMNWconst: 51 return rewriteValueARM64_OpARM64CMNWconst_0(v) 52 case 
OpARM64CMNconst: 53 return rewriteValueARM64_OpARM64CMNconst_0(v) 54 case OpARM64CMNshiftLL: 55 return rewriteValueARM64_OpARM64CMNshiftLL_0(v) 56 case OpARM64CMNshiftRA: 57 return rewriteValueARM64_OpARM64CMNshiftRA_0(v) 58 case OpARM64CMNshiftRL: 59 return rewriteValueARM64_OpARM64CMNshiftRL_0(v) 60 case OpARM64CMP: 61 return rewriteValueARM64_OpARM64CMP_0(v) 62 case OpARM64CMPW: 63 return rewriteValueARM64_OpARM64CMPW_0(v) 64 case OpARM64CMPWconst: 65 return rewriteValueARM64_OpARM64CMPWconst_0(v) 66 case OpARM64CMPconst: 67 return rewriteValueARM64_OpARM64CMPconst_0(v) 68 case OpARM64CMPshiftLL: 69 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 70 case OpARM64CMPshiftRA: 71 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 72 case OpARM64CMPshiftRL: 73 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 74 case OpARM64CSEL: 75 return rewriteValueARM64_OpARM64CSEL_0(v) 76 case OpARM64CSEL0: 77 return rewriteValueARM64_OpARM64CSEL0_0(v) 78 case OpARM64DIV: 79 return rewriteValueARM64_OpARM64DIV_0(v) 80 case OpARM64DIVW: 81 return rewriteValueARM64_OpARM64DIVW_0(v) 82 case OpARM64EON: 83 return rewriteValueARM64_OpARM64EON_0(v) 84 case OpARM64EONshiftLL: 85 return rewriteValueARM64_OpARM64EONshiftLL_0(v) 86 case OpARM64EONshiftRA: 87 return rewriteValueARM64_OpARM64EONshiftRA_0(v) 88 case OpARM64EONshiftRL: 89 return rewriteValueARM64_OpARM64EONshiftRL_0(v) 90 case OpARM64Equal: 91 return rewriteValueARM64_OpARM64Equal_0(v) 92 case OpARM64FADDD: 93 return rewriteValueARM64_OpARM64FADDD_0(v) 94 case OpARM64FADDS: 95 return rewriteValueARM64_OpARM64FADDS_0(v) 96 case OpARM64FMOVDfpgp: 97 return rewriteValueARM64_OpARM64FMOVDfpgp_0(v) 98 case OpARM64FMOVDgpfp: 99 return rewriteValueARM64_OpARM64FMOVDgpfp_0(v) 100 case OpARM64FMOVDload: 101 return rewriteValueARM64_OpARM64FMOVDload_0(v) 102 case OpARM64FMOVDloadidx: 103 return rewriteValueARM64_OpARM64FMOVDloadidx_0(v) 104 case OpARM64FMOVDstore: 105 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 106 case 
OpARM64FMOVDstoreidx: 107 return rewriteValueARM64_OpARM64FMOVDstoreidx_0(v) 108 case OpARM64FMOVSload: 109 return rewriteValueARM64_OpARM64FMOVSload_0(v) 110 case OpARM64FMOVSloadidx: 111 return rewriteValueARM64_OpARM64FMOVSloadidx_0(v) 112 case OpARM64FMOVSstore: 113 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 114 case OpARM64FMOVSstoreidx: 115 return rewriteValueARM64_OpARM64FMOVSstoreidx_0(v) 116 case OpARM64FMULD: 117 return rewriteValueARM64_OpARM64FMULD_0(v) 118 case OpARM64FMULS: 119 return rewriteValueARM64_OpARM64FMULS_0(v) 120 case OpARM64FNEGD: 121 return rewriteValueARM64_OpARM64FNEGD_0(v) 122 case OpARM64FNEGS: 123 return rewriteValueARM64_OpARM64FNEGS_0(v) 124 case OpARM64FNMULD: 125 return rewriteValueARM64_OpARM64FNMULD_0(v) 126 case OpARM64FNMULS: 127 return rewriteValueARM64_OpARM64FNMULS_0(v) 128 case OpARM64FSUBD: 129 return rewriteValueARM64_OpARM64FSUBD_0(v) 130 case OpARM64FSUBS: 131 return rewriteValueARM64_OpARM64FSUBS_0(v) 132 case OpARM64GreaterEqual: 133 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 134 case OpARM64GreaterEqualU: 135 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 136 case OpARM64GreaterThan: 137 return rewriteValueARM64_OpARM64GreaterThan_0(v) 138 case OpARM64GreaterThanU: 139 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 140 case OpARM64LessEqual: 141 return rewriteValueARM64_OpARM64LessEqual_0(v) 142 case OpARM64LessEqualU: 143 return rewriteValueARM64_OpARM64LessEqualU_0(v) 144 case OpARM64LessThan: 145 return rewriteValueARM64_OpARM64LessThan_0(v) 146 case OpARM64LessThanU: 147 return rewriteValueARM64_OpARM64LessThanU_0(v) 148 case OpARM64MADD: 149 return rewriteValueARM64_OpARM64MADD_0(v) || rewriteValueARM64_OpARM64MADD_10(v) || rewriteValueARM64_OpARM64MADD_20(v) 150 case OpARM64MADDW: 151 return rewriteValueARM64_OpARM64MADDW_0(v) || rewriteValueARM64_OpARM64MADDW_10(v) || rewriteValueARM64_OpARM64MADDW_20(v) 152 case OpARM64MNEG: 153 return rewriteValueARM64_OpARM64MNEG_0(v) || 
rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v) 154 case OpARM64MNEGW: 155 return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v) 156 case OpARM64MOD: 157 return rewriteValueARM64_OpARM64MOD_0(v) 158 case OpARM64MODW: 159 return rewriteValueARM64_OpARM64MODW_0(v) 160 case OpARM64MOVBUload: 161 return rewriteValueARM64_OpARM64MOVBUload_0(v) 162 case OpARM64MOVBUloadidx: 163 return rewriteValueARM64_OpARM64MOVBUloadidx_0(v) 164 case OpARM64MOVBUreg: 165 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 166 case OpARM64MOVBload: 167 return rewriteValueARM64_OpARM64MOVBload_0(v) 168 case OpARM64MOVBloadidx: 169 return rewriteValueARM64_OpARM64MOVBloadidx_0(v) 170 case OpARM64MOVBreg: 171 return rewriteValueARM64_OpARM64MOVBreg_0(v) 172 case OpARM64MOVBstore: 173 return rewriteValueARM64_OpARM64MOVBstore_0(v) || rewriteValueARM64_OpARM64MOVBstore_10(v) || rewriteValueARM64_OpARM64MOVBstore_20(v) || rewriteValueARM64_OpARM64MOVBstore_30(v) || rewriteValueARM64_OpARM64MOVBstore_40(v) 174 case OpARM64MOVBstoreidx: 175 return rewriteValueARM64_OpARM64MOVBstoreidx_0(v) || rewriteValueARM64_OpARM64MOVBstoreidx_10(v) 176 case OpARM64MOVBstorezero: 177 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 178 case OpARM64MOVBstorezeroidx: 179 return rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v) 180 case OpARM64MOVDload: 181 return rewriteValueARM64_OpARM64MOVDload_0(v) 182 case OpARM64MOVDloadidx: 183 return rewriteValueARM64_OpARM64MOVDloadidx_0(v) 184 case OpARM64MOVDloadidx8: 185 return rewriteValueARM64_OpARM64MOVDloadidx8_0(v) 186 case OpARM64MOVDreg: 187 return rewriteValueARM64_OpARM64MOVDreg_0(v) 188 case OpARM64MOVDstore: 189 return rewriteValueARM64_OpARM64MOVDstore_0(v) 190 case OpARM64MOVDstoreidx: 191 return rewriteValueARM64_OpARM64MOVDstoreidx_0(v) 192 case OpARM64MOVDstoreidx8: 193 return rewriteValueARM64_OpARM64MOVDstoreidx8_0(v) 194 case OpARM64MOVDstorezero: 195 return 
rewriteValueARM64_OpARM64MOVDstorezero_0(v) 196 case OpARM64MOVDstorezeroidx: 197 return rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v) 198 case OpARM64MOVDstorezeroidx8: 199 return rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v) 200 case OpARM64MOVHUload: 201 return rewriteValueARM64_OpARM64MOVHUload_0(v) 202 case OpARM64MOVHUloadidx: 203 return rewriteValueARM64_OpARM64MOVHUloadidx_0(v) 204 case OpARM64MOVHUloadidx2: 205 return rewriteValueARM64_OpARM64MOVHUloadidx2_0(v) 206 case OpARM64MOVHUreg: 207 return rewriteValueARM64_OpARM64MOVHUreg_0(v) || rewriteValueARM64_OpARM64MOVHUreg_10(v) 208 case OpARM64MOVHload: 209 return rewriteValueARM64_OpARM64MOVHload_0(v) 210 case OpARM64MOVHloadidx: 211 return rewriteValueARM64_OpARM64MOVHloadidx_0(v) 212 case OpARM64MOVHloadidx2: 213 return rewriteValueARM64_OpARM64MOVHloadidx2_0(v) 214 case OpARM64MOVHreg: 215 return rewriteValueARM64_OpARM64MOVHreg_0(v) || rewriteValueARM64_OpARM64MOVHreg_10(v) 216 case OpARM64MOVHstore: 217 return rewriteValueARM64_OpARM64MOVHstore_0(v) || rewriteValueARM64_OpARM64MOVHstore_10(v) || rewriteValueARM64_OpARM64MOVHstore_20(v) 218 case OpARM64MOVHstoreidx: 219 return rewriteValueARM64_OpARM64MOVHstoreidx_0(v) || rewriteValueARM64_OpARM64MOVHstoreidx_10(v) 220 case OpARM64MOVHstoreidx2: 221 return rewriteValueARM64_OpARM64MOVHstoreidx2_0(v) 222 case OpARM64MOVHstorezero: 223 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 224 case OpARM64MOVHstorezeroidx: 225 return rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v) 226 case OpARM64MOVHstorezeroidx2: 227 return rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v) 228 case OpARM64MOVQstorezero: 229 return rewriteValueARM64_OpARM64MOVQstorezero_0(v) 230 case OpARM64MOVWUload: 231 return rewriteValueARM64_OpARM64MOVWUload_0(v) 232 case OpARM64MOVWUloadidx: 233 return rewriteValueARM64_OpARM64MOVWUloadidx_0(v) 234 case OpARM64MOVWUloadidx4: 235 return rewriteValueARM64_OpARM64MOVWUloadidx4_0(v) 236 case OpARM64MOVWUreg: 237 return 
rewriteValueARM64_OpARM64MOVWUreg_0(v) || rewriteValueARM64_OpARM64MOVWUreg_10(v) 238 case OpARM64MOVWload: 239 return rewriteValueARM64_OpARM64MOVWload_0(v) 240 case OpARM64MOVWloadidx: 241 return rewriteValueARM64_OpARM64MOVWloadidx_0(v) 242 case OpARM64MOVWloadidx4: 243 return rewriteValueARM64_OpARM64MOVWloadidx4_0(v) 244 case OpARM64MOVWreg: 245 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 246 case OpARM64MOVWstore: 247 return rewriteValueARM64_OpARM64MOVWstore_0(v) || rewriteValueARM64_OpARM64MOVWstore_10(v) 248 case OpARM64MOVWstoreidx: 249 return rewriteValueARM64_OpARM64MOVWstoreidx_0(v) 250 case OpARM64MOVWstoreidx4: 251 return rewriteValueARM64_OpARM64MOVWstoreidx4_0(v) 252 case OpARM64MOVWstorezero: 253 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 254 case OpARM64MOVWstorezeroidx: 255 return rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v) 256 case OpARM64MOVWstorezeroidx4: 257 return rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v) 258 case OpARM64MSUB: 259 return rewriteValueARM64_OpARM64MSUB_0(v) || rewriteValueARM64_OpARM64MSUB_10(v) || rewriteValueARM64_OpARM64MSUB_20(v) 260 case OpARM64MSUBW: 261 return rewriteValueARM64_OpARM64MSUBW_0(v) || rewriteValueARM64_OpARM64MSUBW_10(v) || rewriteValueARM64_OpARM64MSUBW_20(v) 262 case OpARM64MUL: 263 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 264 case OpARM64MULW: 265 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 266 case OpARM64MVN: 267 return rewriteValueARM64_OpARM64MVN_0(v) 268 case OpARM64MVNshiftLL: 269 return rewriteValueARM64_OpARM64MVNshiftLL_0(v) 270 case OpARM64MVNshiftRA: 271 return rewriteValueARM64_OpARM64MVNshiftRA_0(v) 272 case OpARM64MVNshiftRL: 273 return rewriteValueARM64_OpARM64MVNshiftRL_0(v) 274 case OpARM64NEG: 275 return rewriteValueARM64_OpARM64NEG_0(v) 276 case OpARM64NEGshiftLL: 277 return 
rewriteValueARM64_OpARM64NEGshiftLL_0(v) 278 case OpARM64NEGshiftRA: 279 return rewriteValueARM64_OpARM64NEGshiftRA_0(v) 280 case OpARM64NEGshiftRL: 281 return rewriteValueARM64_OpARM64NEGshiftRL_0(v) 282 case OpARM64NotEqual: 283 return rewriteValueARM64_OpARM64NotEqual_0(v) 284 case OpARM64OR: 285 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) || rewriteValueARM64_OpARM64OR_20(v) || rewriteValueARM64_OpARM64OR_30(v) || rewriteValueARM64_OpARM64OR_40(v) 286 case OpARM64ORN: 287 return rewriteValueARM64_OpARM64ORN_0(v) 288 case OpARM64ORNshiftLL: 289 return rewriteValueARM64_OpARM64ORNshiftLL_0(v) 290 case OpARM64ORNshiftRA: 291 return rewriteValueARM64_OpARM64ORNshiftRA_0(v) 292 case OpARM64ORNshiftRL: 293 return rewriteValueARM64_OpARM64ORNshiftRL_0(v) 294 case OpARM64ORconst: 295 return rewriteValueARM64_OpARM64ORconst_0(v) 296 case OpARM64ORshiftLL: 297 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) || rewriteValueARM64_OpARM64ORshiftLL_20(v) 298 case OpARM64ORshiftRA: 299 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 300 case OpARM64ORshiftRL: 301 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 302 case OpARM64RORWconst: 303 return rewriteValueARM64_OpARM64RORWconst_0(v) 304 case OpARM64RORconst: 305 return rewriteValueARM64_OpARM64RORconst_0(v) 306 case OpARM64SLL: 307 return rewriteValueARM64_OpARM64SLL_0(v) 308 case OpARM64SLLconst: 309 return rewriteValueARM64_OpARM64SLLconst_0(v) 310 case OpARM64SRA: 311 return rewriteValueARM64_OpARM64SRA_0(v) 312 case OpARM64SRAconst: 313 return rewriteValueARM64_OpARM64SRAconst_0(v) 314 case OpARM64SRL: 315 return rewriteValueARM64_OpARM64SRL_0(v) 316 case OpARM64SRLconst: 317 return rewriteValueARM64_OpARM64SRLconst_0(v) || rewriteValueARM64_OpARM64SRLconst_10(v) 318 case OpARM64STP: 319 return rewriteValueARM64_OpARM64STP_0(v) 320 case OpARM64SUB: 321 return rewriteValueARM64_OpARM64SUB_0(v) || rewriteValueARM64_OpARM64SUB_10(v) 322 case 
OpARM64SUBconst: 323 return rewriteValueARM64_OpARM64SUBconst_0(v) 324 case OpARM64SUBshiftLL: 325 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 326 case OpARM64SUBshiftRA: 327 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 328 case OpARM64SUBshiftRL: 329 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 330 case OpARM64TST: 331 return rewriteValueARM64_OpARM64TST_0(v) 332 case OpARM64TSTW: 333 return rewriteValueARM64_OpARM64TSTW_0(v) 334 case OpARM64TSTWconst: 335 return rewriteValueARM64_OpARM64TSTWconst_0(v) 336 case OpARM64TSTconst: 337 return rewriteValueARM64_OpARM64TSTconst_0(v) 338 case OpARM64TSTshiftLL: 339 return rewriteValueARM64_OpARM64TSTshiftLL_0(v) 340 case OpARM64TSTshiftRA: 341 return rewriteValueARM64_OpARM64TSTshiftRA_0(v) 342 case OpARM64TSTshiftRL: 343 return rewriteValueARM64_OpARM64TSTshiftRL_0(v) 344 case OpARM64UBFIZ: 345 return rewriteValueARM64_OpARM64UBFIZ_0(v) 346 case OpARM64UBFX: 347 return rewriteValueARM64_OpARM64UBFX_0(v) 348 case OpARM64UDIV: 349 return rewriteValueARM64_OpARM64UDIV_0(v) 350 case OpARM64UDIVW: 351 return rewriteValueARM64_OpARM64UDIVW_0(v) 352 case OpARM64UMOD: 353 return rewriteValueARM64_OpARM64UMOD_0(v) 354 case OpARM64UMODW: 355 return rewriteValueARM64_OpARM64UMODW_0(v) 356 case OpARM64XOR: 357 return rewriteValueARM64_OpARM64XOR_0(v) || rewriteValueARM64_OpARM64XOR_10(v) 358 case OpARM64XORconst: 359 return rewriteValueARM64_OpARM64XORconst_0(v) 360 case OpARM64XORshiftLL: 361 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 362 case OpARM64XORshiftRA: 363 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 364 case OpARM64XORshiftRL: 365 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 366 case OpAbs: 367 return rewriteValueARM64_OpAbs_0(v) 368 case OpAdd16: 369 return rewriteValueARM64_OpAdd16_0(v) 370 case OpAdd32: 371 return rewriteValueARM64_OpAdd32_0(v) 372 case OpAdd32F: 373 return rewriteValueARM64_OpAdd32F_0(v) 374 case OpAdd64: 375 return rewriteValueARM64_OpAdd64_0(v) 376 case OpAdd64F: 377 return 
rewriteValueARM64_OpAdd64F_0(v) 378 case OpAdd8: 379 return rewriteValueARM64_OpAdd8_0(v) 380 case OpAddPtr: 381 return rewriteValueARM64_OpAddPtr_0(v) 382 case OpAddr: 383 return rewriteValueARM64_OpAddr_0(v) 384 case OpAnd16: 385 return rewriteValueARM64_OpAnd16_0(v) 386 case OpAnd32: 387 return rewriteValueARM64_OpAnd32_0(v) 388 case OpAnd64: 389 return rewriteValueARM64_OpAnd64_0(v) 390 case OpAnd8: 391 return rewriteValueARM64_OpAnd8_0(v) 392 case OpAndB: 393 return rewriteValueARM64_OpAndB_0(v) 394 case OpAtomicAdd32: 395 return rewriteValueARM64_OpAtomicAdd32_0(v) 396 case OpAtomicAdd32Variant: 397 return rewriteValueARM64_OpAtomicAdd32Variant_0(v) 398 case OpAtomicAdd64: 399 return rewriteValueARM64_OpAtomicAdd64_0(v) 400 case OpAtomicAdd64Variant: 401 return rewriteValueARM64_OpAtomicAdd64Variant_0(v) 402 case OpAtomicAnd8: 403 return rewriteValueARM64_OpAtomicAnd8_0(v) 404 case OpAtomicCompareAndSwap32: 405 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 406 case OpAtomicCompareAndSwap64: 407 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 408 case OpAtomicExchange32: 409 return rewriteValueARM64_OpAtomicExchange32_0(v) 410 case OpAtomicExchange64: 411 return rewriteValueARM64_OpAtomicExchange64_0(v) 412 case OpAtomicLoad32: 413 return rewriteValueARM64_OpAtomicLoad32_0(v) 414 case OpAtomicLoad64: 415 return rewriteValueARM64_OpAtomicLoad64_0(v) 416 case OpAtomicLoadPtr: 417 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 418 case OpAtomicOr8: 419 return rewriteValueARM64_OpAtomicOr8_0(v) 420 case OpAtomicStore32: 421 return rewriteValueARM64_OpAtomicStore32_0(v) 422 case OpAtomicStore64: 423 return rewriteValueARM64_OpAtomicStore64_0(v) 424 case OpAtomicStorePtrNoWB: 425 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 426 case OpAvg64u: 427 return rewriteValueARM64_OpAvg64u_0(v) 428 case OpBitLen64: 429 return rewriteValueARM64_OpBitLen64_0(v) 430 case OpBitRev16: 431 return rewriteValueARM64_OpBitRev16_0(v) 432 case OpBitRev32: 433 return 
rewriteValueARM64_OpBitRev32_0(v) 434 case OpBitRev64: 435 return rewriteValueARM64_OpBitRev64_0(v) 436 case OpBitRev8: 437 return rewriteValueARM64_OpBitRev8_0(v) 438 case OpBswap32: 439 return rewriteValueARM64_OpBswap32_0(v) 440 case OpBswap64: 441 return rewriteValueARM64_OpBswap64_0(v) 442 case OpCeil: 443 return rewriteValueARM64_OpCeil_0(v) 444 case OpClosureCall: 445 return rewriteValueARM64_OpClosureCall_0(v) 446 case OpCom16: 447 return rewriteValueARM64_OpCom16_0(v) 448 case OpCom32: 449 return rewriteValueARM64_OpCom32_0(v) 450 case OpCom64: 451 return rewriteValueARM64_OpCom64_0(v) 452 case OpCom8: 453 return rewriteValueARM64_OpCom8_0(v) 454 case OpCondSelect: 455 return rewriteValueARM64_OpCondSelect_0(v) 456 case OpConst16: 457 return rewriteValueARM64_OpConst16_0(v) 458 case OpConst32: 459 return rewriteValueARM64_OpConst32_0(v) 460 case OpConst32F: 461 return rewriteValueARM64_OpConst32F_0(v) 462 case OpConst64: 463 return rewriteValueARM64_OpConst64_0(v) 464 case OpConst64F: 465 return rewriteValueARM64_OpConst64F_0(v) 466 case OpConst8: 467 return rewriteValueARM64_OpConst8_0(v) 468 case OpConstBool: 469 return rewriteValueARM64_OpConstBool_0(v) 470 case OpConstNil: 471 return rewriteValueARM64_OpConstNil_0(v) 472 case OpCtz32: 473 return rewriteValueARM64_OpCtz32_0(v) 474 case OpCtz32NonZero: 475 return rewriteValueARM64_OpCtz32NonZero_0(v) 476 case OpCtz64: 477 return rewriteValueARM64_OpCtz64_0(v) 478 case OpCtz64NonZero: 479 return rewriteValueARM64_OpCtz64NonZero_0(v) 480 case OpCvt32Fto32: 481 return rewriteValueARM64_OpCvt32Fto32_0(v) 482 case OpCvt32Fto32U: 483 return rewriteValueARM64_OpCvt32Fto32U_0(v) 484 case OpCvt32Fto64: 485 return rewriteValueARM64_OpCvt32Fto64_0(v) 486 case OpCvt32Fto64F: 487 return rewriteValueARM64_OpCvt32Fto64F_0(v) 488 case OpCvt32Fto64U: 489 return rewriteValueARM64_OpCvt32Fto64U_0(v) 490 case OpCvt32Uto32F: 491 return rewriteValueARM64_OpCvt32Uto32F_0(v) 492 case OpCvt32Uto64F: 493 return 
rewriteValueARM64_OpCvt32Uto64F_0(v) 494 case OpCvt32to32F: 495 return rewriteValueARM64_OpCvt32to32F_0(v) 496 case OpCvt32to64F: 497 return rewriteValueARM64_OpCvt32to64F_0(v) 498 case OpCvt64Fto32: 499 return rewriteValueARM64_OpCvt64Fto32_0(v) 500 case OpCvt64Fto32F: 501 return rewriteValueARM64_OpCvt64Fto32F_0(v) 502 case OpCvt64Fto32U: 503 return rewriteValueARM64_OpCvt64Fto32U_0(v) 504 case OpCvt64Fto64: 505 return rewriteValueARM64_OpCvt64Fto64_0(v) 506 case OpCvt64Fto64U: 507 return rewriteValueARM64_OpCvt64Fto64U_0(v) 508 case OpCvt64Uto32F: 509 return rewriteValueARM64_OpCvt64Uto32F_0(v) 510 case OpCvt64Uto64F: 511 return rewriteValueARM64_OpCvt64Uto64F_0(v) 512 case OpCvt64to32F: 513 return rewriteValueARM64_OpCvt64to32F_0(v) 514 case OpCvt64to64F: 515 return rewriteValueARM64_OpCvt64to64F_0(v) 516 case OpDiv16: 517 return rewriteValueARM64_OpDiv16_0(v) 518 case OpDiv16u: 519 return rewriteValueARM64_OpDiv16u_0(v) 520 case OpDiv32: 521 return rewriteValueARM64_OpDiv32_0(v) 522 case OpDiv32F: 523 return rewriteValueARM64_OpDiv32F_0(v) 524 case OpDiv32u: 525 return rewriteValueARM64_OpDiv32u_0(v) 526 case OpDiv64: 527 return rewriteValueARM64_OpDiv64_0(v) 528 case OpDiv64F: 529 return rewriteValueARM64_OpDiv64F_0(v) 530 case OpDiv64u: 531 return rewriteValueARM64_OpDiv64u_0(v) 532 case OpDiv8: 533 return rewriteValueARM64_OpDiv8_0(v) 534 case OpDiv8u: 535 return rewriteValueARM64_OpDiv8u_0(v) 536 case OpEq16: 537 return rewriteValueARM64_OpEq16_0(v) 538 case OpEq32: 539 return rewriteValueARM64_OpEq32_0(v) 540 case OpEq32F: 541 return rewriteValueARM64_OpEq32F_0(v) 542 case OpEq64: 543 return rewriteValueARM64_OpEq64_0(v) 544 case OpEq64F: 545 return rewriteValueARM64_OpEq64F_0(v) 546 case OpEq8: 547 return rewriteValueARM64_OpEq8_0(v) 548 case OpEqB: 549 return rewriteValueARM64_OpEqB_0(v) 550 case OpEqPtr: 551 return rewriteValueARM64_OpEqPtr_0(v) 552 case OpFloor: 553 return rewriteValueARM64_OpFloor_0(v) 554 case OpGeq16: 555 return 
rewriteValueARM64_OpGeq16_0(v) 556 case OpGeq16U: 557 return rewriteValueARM64_OpGeq16U_0(v) 558 case OpGeq32: 559 return rewriteValueARM64_OpGeq32_0(v) 560 case OpGeq32F: 561 return rewriteValueARM64_OpGeq32F_0(v) 562 case OpGeq32U: 563 return rewriteValueARM64_OpGeq32U_0(v) 564 case OpGeq64: 565 return rewriteValueARM64_OpGeq64_0(v) 566 case OpGeq64F: 567 return rewriteValueARM64_OpGeq64F_0(v) 568 case OpGeq64U: 569 return rewriteValueARM64_OpGeq64U_0(v) 570 case OpGeq8: 571 return rewriteValueARM64_OpGeq8_0(v) 572 case OpGeq8U: 573 return rewriteValueARM64_OpGeq8U_0(v) 574 case OpGetCallerPC: 575 return rewriteValueARM64_OpGetCallerPC_0(v) 576 case OpGetCallerSP: 577 return rewriteValueARM64_OpGetCallerSP_0(v) 578 case OpGetClosurePtr: 579 return rewriteValueARM64_OpGetClosurePtr_0(v) 580 case OpGreater16: 581 return rewriteValueARM64_OpGreater16_0(v) 582 case OpGreater16U: 583 return rewriteValueARM64_OpGreater16U_0(v) 584 case OpGreater32: 585 return rewriteValueARM64_OpGreater32_0(v) 586 case OpGreater32F: 587 return rewriteValueARM64_OpGreater32F_0(v) 588 case OpGreater32U: 589 return rewriteValueARM64_OpGreater32U_0(v) 590 case OpGreater64: 591 return rewriteValueARM64_OpGreater64_0(v) 592 case OpGreater64F: 593 return rewriteValueARM64_OpGreater64F_0(v) 594 case OpGreater64U: 595 return rewriteValueARM64_OpGreater64U_0(v) 596 case OpGreater8: 597 return rewriteValueARM64_OpGreater8_0(v) 598 case OpGreater8U: 599 return rewriteValueARM64_OpGreater8U_0(v) 600 case OpHmul32: 601 return rewriteValueARM64_OpHmul32_0(v) 602 case OpHmul32u: 603 return rewriteValueARM64_OpHmul32u_0(v) 604 case OpHmul64: 605 return rewriteValueARM64_OpHmul64_0(v) 606 case OpHmul64u: 607 return rewriteValueARM64_OpHmul64u_0(v) 608 case OpInterCall: 609 return rewriteValueARM64_OpInterCall_0(v) 610 case OpIsInBounds: 611 return rewriteValueARM64_OpIsInBounds_0(v) 612 case OpIsNonNil: 613 return rewriteValueARM64_OpIsNonNil_0(v) 614 case OpIsSliceInBounds: 615 return 
rewriteValueARM64_OpIsSliceInBounds_0(v) 616 case OpLeq16: 617 return rewriteValueARM64_OpLeq16_0(v) 618 case OpLeq16U: 619 return rewriteValueARM64_OpLeq16U_0(v) 620 case OpLeq32: 621 return rewriteValueARM64_OpLeq32_0(v) 622 case OpLeq32F: 623 return rewriteValueARM64_OpLeq32F_0(v) 624 case OpLeq32U: 625 return rewriteValueARM64_OpLeq32U_0(v) 626 case OpLeq64: 627 return rewriteValueARM64_OpLeq64_0(v) 628 case OpLeq64F: 629 return rewriteValueARM64_OpLeq64F_0(v) 630 case OpLeq64U: 631 return rewriteValueARM64_OpLeq64U_0(v) 632 case OpLeq8: 633 return rewriteValueARM64_OpLeq8_0(v) 634 case OpLeq8U: 635 return rewriteValueARM64_OpLeq8U_0(v) 636 case OpLess16: 637 return rewriteValueARM64_OpLess16_0(v) 638 case OpLess16U: 639 return rewriteValueARM64_OpLess16U_0(v) 640 case OpLess32: 641 return rewriteValueARM64_OpLess32_0(v) 642 case OpLess32F: 643 return rewriteValueARM64_OpLess32F_0(v) 644 case OpLess32U: 645 return rewriteValueARM64_OpLess32U_0(v) 646 case OpLess64: 647 return rewriteValueARM64_OpLess64_0(v) 648 case OpLess64F: 649 return rewriteValueARM64_OpLess64F_0(v) 650 case OpLess64U: 651 return rewriteValueARM64_OpLess64U_0(v) 652 case OpLess8: 653 return rewriteValueARM64_OpLess8_0(v) 654 case OpLess8U: 655 return rewriteValueARM64_OpLess8U_0(v) 656 case OpLoad: 657 return rewriteValueARM64_OpLoad_0(v) 658 case OpLocalAddr: 659 return rewriteValueARM64_OpLocalAddr_0(v) 660 case OpLsh16x16: 661 return rewriteValueARM64_OpLsh16x16_0(v) 662 case OpLsh16x32: 663 return rewriteValueARM64_OpLsh16x32_0(v) 664 case OpLsh16x64: 665 return rewriteValueARM64_OpLsh16x64_0(v) 666 case OpLsh16x8: 667 return rewriteValueARM64_OpLsh16x8_0(v) 668 case OpLsh32x16: 669 return rewriteValueARM64_OpLsh32x16_0(v) 670 case OpLsh32x32: 671 return rewriteValueARM64_OpLsh32x32_0(v) 672 case OpLsh32x64: 673 return rewriteValueARM64_OpLsh32x64_0(v) 674 case OpLsh32x8: 675 return rewriteValueARM64_OpLsh32x8_0(v) 676 case OpLsh64x16: 677 return rewriteValueARM64_OpLsh64x16_0(v) 678 
case OpLsh64x32: 679 return rewriteValueARM64_OpLsh64x32_0(v) 680 case OpLsh64x64: 681 return rewriteValueARM64_OpLsh64x64_0(v) 682 case OpLsh64x8: 683 return rewriteValueARM64_OpLsh64x8_0(v) 684 case OpLsh8x16: 685 return rewriteValueARM64_OpLsh8x16_0(v) 686 case OpLsh8x32: 687 return rewriteValueARM64_OpLsh8x32_0(v) 688 case OpLsh8x64: 689 return rewriteValueARM64_OpLsh8x64_0(v) 690 case OpLsh8x8: 691 return rewriteValueARM64_OpLsh8x8_0(v) 692 case OpMod16: 693 return rewriteValueARM64_OpMod16_0(v) 694 case OpMod16u: 695 return rewriteValueARM64_OpMod16u_0(v) 696 case OpMod32: 697 return rewriteValueARM64_OpMod32_0(v) 698 case OpMod32u: 699 return rewriteValueARM64_OpMod32u_0(v) 700 case OpMod64: 701 return rewriteValueARM64_OpMod64_0(v) 702 case OpMod64u: 703 return rewriteValueARM64_OpMod64u_0(v) 704 case OpMod8: 705 return rewriteValueARM64_OpMod8_0(v) 706 case OpMod8u: 707 return rewriteValueARM64_OpMod8u_0(v) 708 case OpMove: 709 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 710 case OpMul16: 711 return rewriteValueARM64_OpMul16_0(v) 712 case OpMul32: 713 return rewriteValueARM64_OpMul32_0(v) 714 case OpMul32F: 715 return rewriteValueARM64_OpMul32F_0(v) 716 case OpMul64: 717 return rewriteValueARM64_OpMul64_0(v) 718 case OpMul64F: 719 return rewriteValueARM64_OpMul64F_0(v) 720 case OpMul64uhilo: 721 return rewriteValueARM64_OpMul64uhilo_0(v) 722 case OpMul8: 723 return rewriteValueARM64_OpMul8_0(v) 724 case OpNeg16: 725 return rewriteValueARM64_OpNeg16_0(v) 726 case OpNeg32: 727 return rewriteValueARM64_OpNeg32_0(v) 728 case OpNeg32F: 729 return rewriteValueARM64_OpNeg32F_0(v) 730 case OpNeg64: 731 return rewriteValueARM64_OpNeg64_0(v) 732 case OpNeg64F: 733 return rewriteValueARM64_OpNeg64F_0(v) 734 case OpNeg8: 735 return rewriteValueARM64_OpNeg8_0(v) 736 case OpNeq16: 737 return rewriteValueARM64_OpNeq16_0(v) 738 case OpNeq32: 739 return rewriteValueARM64_OpNeq32_0(v) 740 case OpNeq32F: 741 return rewriteValueARM64_OpNeq32F_0(v) 
742 case OpNeq64: 743 return rewriteValueARM64_OpNeq64_0(v) 744 case OpNeq64F: 745 return rewriteValueARM64_OpNeq64F_0(v) 746 case OpNeq8: 747 return rewriteValueARM64_OpNeq8_0(v) 748 case OpNeqB: 749 return rewriteValueARM64_OpNeqB_0(v) 750 case OpNeqPtr: 751 return rewriteValueARM64_OpNeqPtr_0(v) 752 case OpNilCheck: 753 return rewriteValueARM64_OpNilCheck_0(v) 754 case OpNot: 755 return rewriteValueARM64_OpNot_0(v) 756 case OpOffPtr: 757 return rewriteValueARM64_OpOffPtr_0(v) 758 case OpOr16: 759 return rewriteValueARM64_OpOr16_0(v) 760 case OpOr32: 761 return rewriteValueARM64_OpOr32_0(v) 762 case OpOr64: 763 return rewriteValueARM64_OpOr64_0(v) 764 case OpOr8: 765 return rewriteValueARM64_OpOr8_0(v) 766 case OpOrB: 767 return rewriteValueARM64_OpOrB_0(v) 768 case OpPopCount16: 769 return rewriteValueARM64_OpPopCount16_0(v) 770 case OpPopCount32: 771 return rewriteValueARM64_OpPopCount32_0(v) 772 case OpPopCount64: 773 return rewriteValueARM64_OpPopCount64_0(v) 774 case OpRotateLeft32: 775 return rewriteValueARM64_OpRotateLeft32_0(v) 776 case OpRotateLeft64: 777 return rewriteValueARM64_OpRotateLeft64_0(v) 778 case OpRound: 779 return rewriteValueARM64_OpRound_0(v) 780 case OpRound32F: 781 return rewriteValueARM64_OpRound32F_0(v) 782 case OpRound64F: 783 return rewriteValueARM64_OpRound64F_0(v) 784 case OpRoundToEven: 785 return rewriteValueARM64_OpRoundToEven_0(v) 786 case OpRsh16Ux16: 787 return rewriteValueARM64_OpRsh16Ux16_0(v) 788 case OpRsh16Ux32: 789 return rewriteValueARM64_OpRsh16Ux32_0(v) 790 case OpRsh16Ux64: 791 return rewriteValueARM64_OpRsh16Ux64_0(v) 792 case OpRsh16Ux8: 793 return rewriteValueARM64_OpRsh16Ux8_0(v) 794 case OpRsh16x16: 795 return rewriteValueARM64_OpRsh16x16_0(v) 796 case OpRsh16x32: 797 return rewriteValueARM64_OpRsh16x32_0(v) 798 case OpRsh16x64: 799 return rewriteValueARM64_OpRsh16x64_0(v) 800 case OpRsh16x8: 801 return rewriteValueARM64_OpRsh16x8_0(v) 802 case OpRsh32Ux16: 803 return rewriteValueARM64_OpRsh32Ux16_0(v) 804 
case OpRsh32Ux32: 805 return rewriteValueARM64_OpRsh32Ux32_0(v) 806 case OpRsh32Ux64: 807 return rewriteValueARM64_OpRsh32Ux64_0(v) 808 case OpRsh32Ux8: 809 return rewriteValueARM64_OpRsh32Ux8_0(v) 810 case OpRsh32x16: 811 return rewriteValueARM64_OpRsh32x16_0(v) 812 case OpRsh32x32: 813 return rewriteValueARM64_OpRsh32x32_0(v) 814 case OpRsh32x64: 815 return rewriteValueARM64_OpRsh32x64_0(v) 816 case OpRsh32x8: 817 return rewriteValueARM64_OpRsh32x8_0(v) 818 case OpRsh64Ux16: 819 return rewriteValueARM64_OpRsh64Ux16_0(v) 820 case OpRsh64Ux32: 821 return rewriteValueARM64_OpRsh64Ux32_0(v) 822 case OpRsh64Ux64: 823 return rewriteValueARM64_OpRsh64Ux64_0(v) 824 case OpRsh64Ux8: 825 return rewriteValueARM64_OpRsh64Ux8_0(v) 826 case OpRsh64x16: 827 return rewriteValueARM64_OpRsh64x16_0(v) 828 case OpRsh64x32: 829 return rewriteValueARM64_OpRsh64x32_0(v) 830 case OpRsh64x64: 831 return rewriteValueARM64_OpRsh64x64_0(v) 832 case OpRsh64x8: 833 return rewriteValueARM64_OpRsh64x8_0(v) 834 case OpRsh8Ux16: 835 return rewriteValueARM64_OpRsh8Ux16_0(v) 836 case OpRsh8Ux32: 837 return rewriteValueARM64_OpRsh8Ux32_0(v) 838 case OpRsh8Ux64: 839 return rewriteValueARM64_OpRsh8Ux64_0(v) 840 case OpRsh8Ux8: 841 return rewriteValueARM64_OpRsh8Ux8_0(v) 842 case OpRsh8x16: 843 return rewriteValueARM64_OpRsh8x16_0(v) 844 case OpRsh8x32: 845 return rewriteValueARM64_OpRsh8x32_0(v) 846 case OpRsh8x64: 847 return rewriteValueARM64_OpRsh8x64_0(v) 848 case OpRsh8x8: 849 return rewriteValueARM64_OpRsh8x8_0(v) 850 case OpSignExt16to32: 851 return rewriteValueARM64_OpSignExt16to32_0(v) 852 case OpSignExt16to64: 853 return rewriteValueARM64_OpSignExt16to64_0(v) 854 case OpSignExt32to64: 855 return rewriteValueARM64_OpSignExt32to64_0(v) 856 case OpSignExt8to16: 857 return rewriteValueARM64_OpSignExt8to16_0(v) 858 case OpSignExt8to32: 859 return rewriteValueARM64_OpSignExt8to32_0(v) 860 case OpSignExt8to64: 861 return rewriteValueARM64_OpSignExt8to64_0(v) 862 case OpSlicemask: 863 return 
rewriteValueARM64_OpSlicemask_0(v) 864 case OpSqrt: 865 return rewriteValueARM64_OpSqrt_0(v) 866 case OpStaticCall: 867 return rewriteValueARM64_OpStaticCall_0(v) 868 case OpStore: 869 return rewriteValueARM64_OpStore_0(v) 870 case OpSub16: 871 return rewriteValueARM64_OpSub16_0(v) 872 case OpSub32: 873 return rewriteValueARM64_OpSub32_0(v) 874 case OpSub32F: 875 return rewriteValueARM64_OpSub32F_0(v) 876 case OpSub64: 877 return rewriteValueARM64_OpSub64_0(v) 878 case OpSub64F: 879 return rewriteValueARM64_OpSub64F_0(v) 880 case OpSub8: 881 return rewriteValueARM64_OpSub8_0(v) 882 case OpSubPtr: 883 return rewriteValueARM64_OpSubPtr_0(v) 884 case OpTrunc: 885 return rewriteValueARM64_OpTrunc_0(v) 886 case OpTrunc16to8: 887 return rewriteValueARM64_OpTrunc16to8_0(v) 888 case OpTrunc32to16: 889 return rewriteValueARM64_OpTrunc32to16_0(v) 890 case OpTrunc32to8: 891 return rewriteValueARM64_OpTrunc32to8_0(v) 892 case OpTrunc64to16: 893 return rewriteValueARM64_OpTrunc64to16_0(v) 894 case OpTrunc64to32: 895 return rewriteValueARM64_OpTrunc64to32_0(v) 896 case OpTrunc64to8: 897 return rewriteValueARM64_OpTrunc64to8_0(v) 898 case OpWB: 899 return rewriteValueARM64_OpWB_0(v) 900 case OpXor16: 901 return rewriteValueARM64_OpXor16_0(v) 902 case OpXor32: 903 return rewriteValueARM64_OpXor32_0(v) 904 case OpXor64: 905 return rewriteValueARM64_OpXor64_0(v) 906 case OpXor8: 907 return rewriteValueARM64_OpXor8_0(v) 908 case OpZero: 909 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 910 case OpZeroExt16to32: 911 return rewriteValueARM64_OpZeroExt16to32_0(v) 912 case OpZeroExt16to64: 913 return rewriteValueARM64_OpZeroExt16to64_0(v) 914 case OpZeroExt32to64: 915 return rewriteValueARM64_OpZeroExt32to64_0(v) 916 case OpZeroExt8to16: 917 return rewriteValueARM64_OpZeroExt8to16_0(v) 918 case OpZeroExt8to32: 919 return rewriteValueARM64_OpZeroExt8to32_0(v) 920 case OpZeroExt8to64: 921 return 
// rewriteValueARM64_OpARM64ADD_0 applies the first batch of generated rewrite
// rules for OpARM64ADD: folding a constant operand into ADDconst, and fusing
// an ADD with a single-use MUL/MNEG (and their 32-bit W forms) into the
// combined multiply-add/subtract ops MADD/MSUB/MADDW/MSUBW.
// It reports whether any rule fired (v was rewritten in place).
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64ADD_0(v *Value) bool {
	// match: (ADD x (MOVDconst [c]))
	// cond:
	// result: (ADDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD (MOVDconst [c]) x)
	// cond:
	// result: (ADDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADD a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MUL {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MUL x y) a)
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		_ = v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MUL {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		a := v.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEG {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MNEG x y) a)
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		_ = v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MNEG {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		a := v.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MULW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MULW x y) a)
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		_ = v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MULW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		a := v.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEGW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD l:(MNEGW x y) a)
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		_ = v.Args[1]
		l := v.Args[0]
		if l.Op != OpARM64MNEGW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		a := v.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADD_10 applies the second batch of generated
// rewrite rules for OpARM64ADD: turning ADD-of-NEG into SUB, folding a
// single-use constant shift operand into the fused ADDshiftLL/RL/RA forms,
// and recognizing the two 64-bit rotate-by-register idioms
// (SLL/SRL combined through CSEL0) as a single ROR.
// It reports whether any rule fired (v was rewritten in place).
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64ADD_10(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 63 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 {
			break
		}
		if v_1.Type != typ.UInt64 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SRL {
			break
		}
		if v_1_0.Type != typ.UInt64 {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB {
			break
		}
		if v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_0_1_0.AuxInt != 64 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_0_1_1.Type != t {
			break
		}
		if v_1_0_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst {
			break
		}
		if v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB {
			break
		}
		if v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_1_0_0.AuxInt != 64 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1_0_1.Type != t {
			break
		}
		if v_1_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_1_1_0_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (ADD (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 {
			break
		}
		if v_0.Type != typ.UInt64 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SRL {
			break
		}
		if v_0_0.Type != typ.UInt64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_0_1_0.AuxInt != 64 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_0_1_1.Type != t {
			break
		}
		if v_0_0_1_1.AuxInt != 63 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst {
			break
		}
		if v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB {
			break
		}
		if v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_1_0_0.AuxInt != 64 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_1_0_1.Type != t {
			break
		}
		if v_0_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLL {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1.Type != t {
			break
		}
		if v_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADD_20 applies the third batch of generated
// rewrite rules for OpARM64ADD: the remaining rotate-by-register idioms.
// The first two rules fold the 64-bit SRL/SLL-through-CSEL0 pattern into a
// single ROR; the last four fold the analogous 32-bit patterns (masked with
// [31] and using MOVWUreg for the zero-extended low word) into RORW, with or
// without a negated shift amount depending on rotation direction.
// It reports whether any rule fired (v was rewritten in place).
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64ADD_20(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRL {
			break
		}
		if v_0.Type != typ.UInt64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 63 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 {
			break
		}
		if v_1.Type != typ.UInt64 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SLL {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB {
			break
		}
		if v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_0_1_0.AuxInt != 64 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_0_1_1.Type != t {
			break
		}
		if v_1_0_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst {
			break
		}
		if v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB {
			break
		}
		if v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_1_0_0.AuxInt != 64 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1_0_1.Type != t {
			break
		}
		if v_1_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_1_1_0_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 {
			break
		}
		if v_0.Type != typ.UInt64 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SLL {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_0_1_0.AuxInt != 64 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_0_1_1.Type != t {
			break
		}
		if v_0_0_1_1.AuxInt != 63 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst {
			break
		}
		if v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB {
			break
		}
		if v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_1_0_0.AuxInt != 64 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_1_0_1.Type != t {
			break
		}
		if v_0_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRL {
			break
		}
		if v_1.Type != typ.UInt64 {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1.Type != t {
			break
		}
		if v_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 31 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 {
			break
		}
		if v_1.Type != typ.UInt32 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SRL {
			break
		}
		if v_1_0.Type != typ.UInt32 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		if v_1_0_0.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1_0_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB {
			break
		}
		if v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_0_1_0.AuxInt != 32 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_0_1_1.Type != t {
			break
		}
		if v_1_0_1_1.AuxInt != 31 {
			break
		}
		if y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst {
			break
		}
		if v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB {
			break
		}
		if v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_1_0_0.AuxInt != 32 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1_0_1.Type != t {
			break
		}
		if v_1_1_0_1.AuxInt != 31 {
			break
		}
		if y != v_1_1_0_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (ADD (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 {
			break
		}
		if v_0.Type != typ.UInt32 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SRL {
			break
		}
		if v_0_0.Type != typ.UInt32 {
			break
		}
		_ = v_0_0.Args[1]
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_0_1_0.AuxInt != 32 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_0_1_1.Type != t {
			break
		}
		if v_0_0_1_1.AuxInt != 31 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst {
			break
		}
		if v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB {
			break
		}
		if v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_1_0_0.AuxInt != 32 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_1_0_1.Type != t {
			break
		}
		if v_0_1_0_1.AuxInt != 31 {
			break
		}
		if y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLL {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1.Type != t {
			break
		}
		if v_1_1.AuxInt != 31 {
			break
		}
		if y != v_1_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (ADD (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRL {
			break
		}
		if v_0.Type != typ.UInt32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 31 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 {
			break
		}
		if v_1.Type != typ.UInt32 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SLL {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB {
			break
		}
		if v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_0_1_0.AuxInt != 32 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_0_1_1.Type != t {
			break
		}
		if v_1_0_1_1.AuxInt != 31 {
			break
		}
		if y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst {
			break
		}
		if v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB {
			break
		}
		if v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_1_0_0.AuxInt != 32 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1_0_1.Type != t {
			break
		}
		if v_1_1_0_1.AuxInt != 31 {
			break
		}
		if y != v_1_1_0_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 {
			break
		}
		if v_0.Type != typ.UInt32 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SLL {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_0_1_0.AuxInt != 32 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_0_1_1.Type != t {
			break
		}
		if v_0_0_1_1.AuxInt != 31 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst {
			break
		}
		if v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB {
			break
		}
		if v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_1_0_0.AuxInt != 32 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_1_0_1.Type != t {
			break
		}
		if v_0_1_0_1.AuxInt != 31 {
			break
		}
		if y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRL {
			break
		}
		if v_1.Type != typ.UInt32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1_0.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1.Type != t {
			break
		}
		if v_1_1.AuxInt != 31 {
			break
		}
		if y != v_1_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
2150 } 2151 if v_0_1.AuxInt != 64 { 2152 break 2153 } 2154 v_0_1_0 := v_0_1.Args[0] 2155 if v_0_1_0.Op != OpARM64SUB { 2156 break 2157 } 2158 if v_0_1_0.Type != t { 2159 break 2160 } 2161 _ = v_0_1_0.Args[1] 2162 v_0_1_0_0 := v_0_1_0.Args[0] 2163 if v_0_1_0_0.Op != OpARM64MOVDconst { 2164 break 2165 } 2166 if v_0_1_0_0.AuxInt != 32 { 2167 break 2168 } 2169 v_0_1_0_1 := v_0_1_0.Args[1] 2170 if v_0_1_0_1.Op != OpARM64ANDconst { 2171 break 2172 } 2173 if v_0_1_0_1.Type != t { 2174 break 2175 } 2176 if v_0_1_0_1.AuxInt != 31 { 2177 break 2178 } 2179 if y != v_0_1_0_1.Args[0] { 2180 break 2181 } 2182 v_1 := v.Args[1] 2183 if v_1.Op != OpARM64SRL { 2184 break 2185 } 2186 if v_1.Type != typ.UInt32 { 2187 break 2188 } 2189 _ = v_1.Args[1] 2190 v_1_0 := v_1.Args[0] 2191 if v_1_0.Op != OpARM64MOVWUreg { 2192 break 2193 } 2194 if x != v_1_0.Args[0] { 2195 break 2196 } 2197 v_1_1 := v_1.Args[1] 2198 if v_1_1.Op != OpARM64ANDconst { 2199 break 2200 } 2201 if v_1_1.Type != t { 2202 break 2203 } 2204 if v_1_1.AuxInt != 31 { 2205 break 2206 } 2207 if y != v_1_1.Args[0] { 2208 break 2209 } 2210 if !(cc.(Op) == OpARM64LessThanU) { 2211 break 2212 } 2213 v.reset(OpARM64RORW) 2214 v.AddArg(x) 2215 v.AddArg(y) 2216 return true 2217 } 2218 return false 2219 } 2220 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 2221 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 2222 // cond: 2223 // result: (MOVDaddr [off1+off2] {sym} ptr) 2224 for { 2225 off1 := v.AuxInt 2226 v_0 := v.Args[0] 2227 if v_0.Op != OpARM64MOVDaddr { 2228 break 2229 } 2230 off2 := v_0.AuxInt 2231 sym := v_0.Aux 2232 ptr := v_0.Args[0] 2233 v.reset(OpARM64MOVDaddr) 2234 v.AuxInt = off1 + off2 2235 v.Aux = sym 2236 v.AddArg(ptr) 2237 return true 2238 } 2239 // match: (ADDconst [0] x) 2240 // cond: 2241 // result: x 2242 for { 2243 if v.AuxInt != 0 { 2244 break 2245 } 2246 x := v.Args[0] 2247 v.reset(OpCopy) 2248 v.Type = x.Type 2249 v.AddArg(x) 2250 return true 2251 } 2252 // match: (ADDconst [c] (MOVDconst 
// rewriteValueARM64_OpARM64ADDshiftLL_0 applies the generated rewrite rules
// for OpARM64ADDshiftLL: folding constant operands into ADDconst, and
// recognizing shift-and-add combinations as rotates (RORconst/RORWconst)
// or as bitfield extracts (EXTRconst/EXTRWconst). Rule order matters: the
// rotate rules (same source value in both operands) are tried before the
// more general EXTR rules, which match any second operand.
// It reports whether any rule fired (v was rewritten in place).
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
	// cond:
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v.Args[1]
		v.reset(OpARM64EXTRconst)
		v.AuxInt = 64 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		x2 := v.Args[1]
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = 32 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	return false
}
rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 2433 b := v.Block 2434 _ = b 2435 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 2436 // cond: 2437 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 2438 for { 2439 d := v.AuxInt 2440 _ = v.Args[1] 2441 v_0 := v.Args[0] 2442 if v_0.Op != OpARM64MOVDconst { 2443 break 2444 } 2445 c := v_0.AuxInt 2446 x := v.Args[1] 2447 v.reset(OpARM64ADDconst) 2448 v.AuxInt = c 2449 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2450 v0.AuxInt = d 2451 v0.AddArg(x) 2452 v.AddArg(v0) 2453 return true 2454 } 2455 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 2456 // cond: 2457 // result: (ADDconst x [c>>uint64(d)]) 2458 for { 2459 d := v.AuxInt 2460 _ = v.Args[1] 2461 x := v.Args[0] 2462 v_1 := v.Args[1] 2463 if v_1.Op != OpARM64MOVDconst { 2464 break 2465 } 2466 c := v_1.AuxInt 2467 v.reset(OpARM64ADDconst) 2468 v.AuxInt = c >> uint64(d) 2469 v.AddArg(x) 2470 return true 2471 } 2472 return false 2473 } 2474 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 2475 b := v.Block 2476 _ = b 2477 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 2478 // cond: 2479 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 2480 for { 2481 d := v.AuxInt 2482 _ = v.Args[1] 2483 v_0 := v.Args[0] 2484 if v_0.Op != OpARM64MOVDconst { 2485 break 2486 } 2487 c := v_0.AuxInt 2488 x := v.Args[1] 2489 v.reset(OpARM64ADDconst) 2490 v.AuxInt = c 2491 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2492 v0.AuxInt = d 2493 v0.AddArg(x) 2494 v.AddArg(v0) 2495 return true 2496 } 2497 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 2498 // cond: 2499 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 2500 for { 2501 d := v.AuxInt 2502 _ = v.Args[1] 2503 x := v.Args[0] 2504 v_1 := v.Args[1] 2505 if v_1.Op != OpARM64MOVDconst { 2506 break 2507 } 2508 c := v_1.AuxInt 2509 v.reset(OpARM64ADDconst) 2510 v.AuxInt = int64(uint64(c) >> uint64(d)) 2511 v.AddArg(x) 2512 return true 2513 } 2514 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 2515 // cond: 2516 
// result: (RORconst [ c] x) 2517 for { 2518 c := v.AuxInt 2519 _ = v.Args[1] 2520 v_0 := v.Args[0] 2521 if v_0.Op != OpARM64SLLconst { 2522 break 2523 } 2524 if v_0.AuxInt != 64-c { 2525 break 2526 } 2527 x := v_0.Args[0] 2528 if x != v.Args[1] { 2529 break 2530 } 2531 v.reset(OpARM64RORconst) 2532 v.AuxInt = c 2533 v.AddArg(x) 2534 return true 2535 } 2536 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 2537 // cond: c < 32 && t.Size() == 4 2538 // result: (RORWconst [c] x) 2539 for { 2540 t := v.Type 2541 c := v.AuxInt 2542 _ = v.Args[1] 2543 v_0 := v.Args[0] 2544 if v_0.Op != OpARM64SLLconst { 2545 break 2546 } 2547 if v_0.AuxInt != 32-c { 2548 break 2549 } 2550 x := v_0.Args[0] 2551 v_1 := v.Args[1] 2552 if v_1.Op != OpARM64MOVWUreg { 2553 break 2554 } 2555 if x != v_1.Args[0] { 2556 break 2557 } 2558 if !(c < 32 && t.Size() == 4) { 2559 break 2560 } 2561 v.reset(OpARM64RORWconst) 2562 v.AuxInt = c 2563 v.AddArg(x) 2564 return true 2565 } 2566 return false 2567 } 2568 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 2569 // match: (AND x (MOVDconst [c])) 2570 // cond: 2571 // result: (ANDconst [c] x) 2572 for { 2573 _ = v.Args[1] 2574 x := v.Args[0] 2575 v_1 := v.Args[1] 2576 if v_1.Op != OpARM64MOVDconst { 2577 break 2578 } 2579 c := v_1.AuxInt 2580 v.reset(OpARM64ANDconst) 2581 v.AuxInt = c 2582 v.AddArg(x) 2583 return true 2584 } 2585 // match: (AND (MOVDconst [c]) x) 2586 // cond: 2587 // result: (ANDconst [c] x) 2588 for { 2589 _ = v.Args[1] 2590 v_0 := v.Args[0] 2591 if v_0.Op != OpARM64MOVDconst { 2592 break 2593 } 2594 c := v_0.AuxInt 2595 x := v.Args[1] 2596 v.reset(OpARM64ANDconst) 2597 v.AuxInt = c 2598 v.AddArg(x) 2599 return true 2600 } 2601 // match: (AND x x) 2602 // cond: 2603 // result: x 2604 for { 2605 _ = v.Args[1] 2606 x := v.Args[0] 2607 if x != v.Args[1] { 2608 break 2609 } 2610 v.reset(OpCopy) 2611 v.Type = x.Type 2612 v.AddArg(x) 2613 return true 2614 } 2615 // match: (AND x (MVN y)) 2616 // cond: 2617 // result: 
(BIC x y) 2618 for { 2619 _ = v.Args[1] 2620 x := v.Args[0] 2621 v_1 := v.Args[1] 2622 if v_1.Op != OpARM64MVN { 2623 break 2624 } 2625 y := v_1.Args[0] 2626 v.reset(OpARM64BIC) 2627 v.AddArg(x) 2628 v.AddArg(y) 2629 return true 2630 } 2631 // match: (AND (MVN y) x) 2632 // cond: 2633 // result: (BIC x y) 2634 for { 2635 _ = v.Args[1] 2636 v_0 := v.Args[0] 2637 if v_0.Op != OpARM64MVN { 2638 break 2639 } 2640 y := v_0.Args[0] 2641 x := v.Args[1] 2642 v.reset(OpARM64BIC) 2643 v.AddArg(x) 2644 v.AddArg(y) 2645 return true 2646 } 2647 // match: (AND x0 x1:(SLLconst [c] y)) 2648 // cond: clobberIfDead(x1) 2649 // result: (ANDshiftLL x0 y [c]) 2650 for { 2651 _ = v.Args[1] 2652 x0 := v.Args[0] 2653 x1 := v.Args[1] 2654 if x1.Op != OpARM64SLLconst { 2655 break 2656 } 2657 c := x1.AuxInt 2658 y := x1.Args[0] 2659 if !(clobberIfDead(x1)) { 2660 break 2661 } 2662 v.reset(OpARM64ANDshiftLL) 2663 v.AuxInt = c 2664 v.AddArg(x0) 2665 v.AddArg(y) 2666 return true 2667 } 2668 // match: (AND x1:(SLLconst [c] y) x0) 2669 // cond: clobberIfDead(x1) 2670 // result: (ANDshiftLL x0 y [c]) 2671 for { 2672 _ = v.Args[1] 2673 x1 := v.Args[0] 2674 if x1.Op != OpARM64SLLconst { 2675 break 2676 } 2677 c := x1.AuxInt 2678 y := x1.Args[0] 2679 x0 := v.Args[1] 2680 if !(clobberIfDead(x1)) { 2681 break 2682 } 2683 v.reset(OpARM64ANDshiftLL) 2684 v.AuxInt = c 2685 v.AddArg(x0) 2686 v.AddArg(y) 2687 return true 2688 } 2689 // match: (AND x0 x1:(SRLconst [c] y)) 2690 // cond: clobberIfDead(x1) 2691 // result: (ANDshiftRL x0 y [c]) 2692 for { 2693 _ = v.Args[1] 2694 x0 := v.Args[0] 2695 x1 := v.Args[1] 2696 if x1.Op != OpARM64SRLconst { 2697 break 2698 } 2699 c := x1.AuxInt 2700 y := x1.Args[0] 2701 if !(clobberIfDead(x1)) { 2702 break 2703 } 2704 v.reset(OpARM64ANDshiftRL) 2705 v.AuxInt = c 2706 v.AddArg(x0) 2707 v.AddArg(y) 2708 return true 2709 } 2710 // match: (AND x1:(SRLconst [c] y) x0) 2711 // cond: clobberIfDead(x1) 2712 // result: (ANDshiftRL x0 y [c]) 2713 for { 2714 _ = v.Args[1] 2715 x1 
:= v.Args[0] 2716 if x1.Op != OpARM64SRLconst { 2717 break 2718 } 2719 c := x1.AuxInt 2720 y := x1.Args[0] 2721 x0 := v.Args[1] 2722 if !(clobberIfDead(x1)) { 2723 break 2724 } 2725 v.reset(OpARM64ANDshiftRL) 2726 v.AuxInt = c 2727 v.AddArg(x0) 2728 v.AddArg(y) 2729 return true 2730 } 2731 // match: (AND x0 x1:(SRAconst [c] y)) 2732 // cond: clobberIfDead(x1) 2733 // result: (ANDshiftRA x0 y [c]) 2734 for { 2735 _ = v.Args[1] 2736 x0 := v.Args[0] 2737 x1 := v.Args[1] 2738 if x1.Op != OpARM64SRAconst { 2739 break 2740 } 2741 c := x1.AuxInt 2742 y := x1.Args[0] 2743 if !(clobberIfDead(x1)) { 2744 break 2745 } 2746 v.reset(OpARM64ANDshiftRA) 2747 v.AuxInt = c 2748 v.AddArg(x0) 2749 v.AddArg(y) 2750 return true 2751 } 2752 return false 2753 } 2754 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 2755 // match: (AND x1:(SRAconst [c] y) x0) 2756 // cond: clobberIfDead(x1) 2757 // result: (ANDshiftRA x0 y [c]) 2758 for { 2759 _ = v.Args[1] 2760 x1 := v.Args[0] 2761 if x1.Op != OpARM64SRAconst { 2762 break 2763 } 2764 c := x1.AuxInt 2765 y := x1.Args[0] 2766 x0 := v.Args[1] 2767 if !(clobberIfDead(x1)) { 2768 break 2769 } 2770 v.reset(OpARM64ANDshiftRA) 2771 v.AuxInt = c 2772 v.AddArg(x0) 2773 v.AddArg(y) 2774 return true 2775 } 2776 return false 2777 } 2778 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 2779 // match: (ANDconst [0] _) 2780 // cond: 2781 // result: (MOVDconst [0]) 2782 for { 2783 if v.AuxInt != 0 { 2784 break 2785 } 2786 v.reset(OpARM64MOVDconst) 2787 v.AuxInt = 0 2788 return true 2789 } 2790 // match: (ANDconst [-1] x) 2791 // cond: 2792 // result: x 2793 for { 2794 if v.AuxInt != -1 { 2795 break 2796 } 2797 x := v.Args[0] 2798 v.reset(OpCopy) 2799 v.Type = x.Type 2800 v.AddArg(x) 2801 return true 2802 } 2803 // match: (ANDconst [c] (MOVDconst [d])) 2804 // cond: 2805 // result: (MOVDconst [c&d]) 2806 for { 2807 c := v.AuxInt 2808 v_0 := v.Args[0] 2809 if v_0.Op != OpARM64MOVDconst { 2810 break 2811 } 2812 d := v_0.AuxInt 2813 
v.reset(OpARM64MOVDconst) 2814 v.AuxInt = c & d 2815 return true 2816 } 2817 // match: (ANDconst [c] (ANDconst [d] x)) 2818 // cond: 2819 // result: (ANDconst [c&d] x) 2820 for { 2821 c := v.AuxInt 2822 v_0 := v.Args[0] 2823 if v_0.Op != OpARM64ANDconst { 2824 break 2825 } 2826 d := v_0.AuxInt 2827 x := v_0.Args[0] 2828 v.reset(OpARM64ANDconst) 2829 v.AuxInt = c & d 2830 v.AddArg(x) 2831 return true 2832 } 2833 // match: (ANDconst [c] (MOVWUreg x)) 2834 // cond: 2835 // result: (ANDconst [c&(1<<32-1)] x) 2836 for { 2837 c := v.AuxInt 2838 v_0 := v.Args[0] 2839 if v_0.Op != OpARM64MOVWUreg { 2840 break 2841 } 2842 x := v_0.Args[0] 2843 v.reset(OpARM64ANDconst) 2844 v.AuxInt = c & (1<<32 - 1) 2845 v.AddArg(x) 2846 return true 2847 } 2848 // match: (ANDconst [c] (MOVHUreg x)) 2849 // cond: 2850 // result: (ANDconst [c&(1<<16-1)] x) 2851 for { 2852 c := v.AuxInt 2853 v_0 := v.Args[0] 2854 if v_0.Op != OpARM64MOVHUreg { 2855 break 2856 } 2857 x := v_0.Args[0] 2858 v.reset(OpARM64ANDconst) 2859 v.AuxInt = c & (1<<16 - 1) 2860 v.AddArg(x) 2861 return true 2862 } 2863 // match: (ANDconst [c] (MOVBUreg x)) 2864 // cond: 2865 // result: (ANDconst [c&(1<<8-1)] x) 2866 for { 2867 c := v.AuxInt 2868 v_0 := v.Args[0] 2869 if v_0.Op != OpARM64MOVBUreg { 2870 break 2871 } 2872 x := v_0.Args[0] 2873 v.reset(OpARM64ANDconst) 2874 v.AuxInt = c & (1<<8 - 1) 2875 v.AddArg(x) 2876 return true 2877 } 2878 // match: (ANDconst [ac] (SLLconst [sc] x)) 2879 // cond: isARM64BFMask(sc, ac, sc) 2880 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(ac, sc))] x) 2881 for { 2882 ac := v.AuxInt 2883 v_0 := v.Args[0] 2884 if v_0.Op != OpARM64SLLconst { 2885 break 2886 } 2887 sc := v_0.AuxInt 2888 x := v_0.Args[0] 2889 if !(isARM64BFMask(sc, ac, sc)) { 2890 break 2891 } 2892 v.reset(OpARM64UBFIZ) 2893 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, sc)) 2894 v.AddArg(x) 2895 return true 2896 } 2897 // match: (ANDconst [ac] (SRLconst [sc] x)) 2898 // cond: isARM64BFMask(sc, ac, 0) 2899 // result: (UBFX 
[arm64BFAuxInt(sc, arm64BFWidth(ac, 0))] x) 2900 for { 2901 ac := v.AuxInt 2902 v_0 := v.Args[0] 2903 if v_0.Op != OpARM64SRLconst { 2904 break 2905 } 2906 sc := v_0.AuxInt 2907 x := v_0.Args[0] 2908 if !(isARM64BFMask(sc, ac, 0)) { 2909 break 2910 } 2911 v.reset(OpARM64UBFX) 2912 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, 0)) 2913 v.AddArg(x) 2914 return true 2915 } 2916 return false 2917 } 2918 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 2919 b := v.Block 2920 _ = b 2921 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 2922 // cond: 2923 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 2924 for { 2925 d := v.AuxInt 2926 _ = v.Args[1] 2927 v_0 := v.Args[0] 2928 if v_0.Op != OpARM64MOVDconst { 2929 break 2930 } 2931 c := v_0.AuxInt 2932 x := v.Args[1] 2933 v.reset(OpARM64ANDconst) 2934 v.AuxInt = c 2935 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2936 v0.AuxInt = d 2937 v0.AddArg(x) 2938 v.AddArg(v0) 2939 return true 2940 } 2941 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 2942 // cond: 2943 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 2944 for { 2945 d := v.AuxInt 2946 _ = v.Args[1] 2947 x := v.Args[0] 2948 v_1 := v.Args[1] 2949 if v_1.Op != OpARM64MOVDconst { 2950 break 2951 } 2952 c := v_1.AuxInt 2953 v.reset(OpARM64ANDconst) 2954 v.AuxInt = int64(uint64(c) << uint64(d)) 2955 v.AddArg(x) 2956 return true 2957 } 2958 // match: (ANDshiftLL x y:(SLLconst x [c]) [d]) 2959 // cond: c==d 2960 // result: y 2961 for { 2962 d := v.AuxInt 2963 _ = v.Args[1] 2964 x := v.Args[0] 2965 y := v.Args[1] 2966 if y.Op != OpARM64SLLconst { 2967 break 2968 } 2969 c := y.AuxInt 2970 if x != y.Args[0] { 2971 break 2972 } 2973 if !(c == d) { 2974 break 2975 } 2976 v.reset(OpCopy) 2977 v.Type = y.Type 2978 v.AddArg(y) 2979 return true 2980 } 2981 return false 2982 } 2983 func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool { 2984 b := v.Block 2985 _ = b 2986 // match: (ANDshiftRA (MOVDconst [c]) x [d]) 2987 // cond: 2988 // result: (ANDconst [c] 
(SRAconst <x.Type> x [d])) 2989 for { 2990 d := v.AuxInt 2991 _ = v.Args[1] 2992 v_0 := v.Args[0] 2993 if v_0.Op != OpARM64MOVDconst { 2994 break 2995 } 2996 c := v_0.AuxInt 2997 x := v.Args[1] 2998 v.reset(OpARM64ANDconst) 2999 v.AuxInt = c 3000 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 3001 v0.AuxInt = d 3002 v0.AddArg(x) 3003 v.AddArg(v0) 3004 return true 3005 } 3006 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 3007 // cond: 3008 // result: (ANDconst x [c>>uint64(d)]) 3009 for { 3010 d := v.AuxInt 3011 _ = v.Args[1] 3012 x := v.Args[0] 3013 v_1 := v.Args[1] 3014 if v_1.Op != OpARM64MOVDconst { 3015 break 3016 } 3017 c := v_1.AuxInt 3018 v.reset(OpARM64ANDconst) 3019 v.AuxInt = c >> uint64(d) 3020 v.AddArg(x) 3021 return true 3022 } 3023 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 3024 // cond: c==d 3025 // result: y 3026 for { 3027 d := v.AuxInt 3028 _ = v.Args[1] 3029 x := v.Args[0] 3030 y := v.Args[1] 3031 if y.Op != OpARM64SRAconst { 3032 break 3033 } 3034 c := y.AuxInt 3035 if x != y.Args[0] { 3036 break 3037 } 3038 if !(c == d) { 3039 break 3040 } 3041 v.reset(OpCopy) 3042 v.Type = y.Type 3043 v.AddArg(y) 3044 return true 3045 } 3046 return false 3047 } 3048 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool { 3049 b := v.Block 3050 _ = b 3051 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 3052 // cond: 3053 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 3054 for { 3055 d := v.AuxInt 3056 _ = v.Args[1] 3057 v_0 := v.Args[0] 3058 if v_0.Op != OpARM64MOVDconst { 3059 break 3060 } 3061 c := v_0.AuxInt 3062 x := v.Args[1] 3063 v.reset(OpARM64ANDconst) 3064 v.AuxInt = c 3065 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3066 v0.AuxInt = d 3067 v0.AddArg(x) 3068 v.AddArg(v0) 3069 return true 3070 } 3071 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 3072 // cond: 3073 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 3074 for { 3075 d := v.AuxInt 3076 _ = v.Args[1] 3077 x := v.Args[0] 3078 v_1 := v.Args[1] 3079 if v_1.Op != 
OpARM64MOVDconst { 3080 break 3081 } 3082 c := v_1.AuxInt 3083 v.reset(OpARM64ANDconst) 3084 v.AuxInt = int64(uint64(c) >> uint64(d)) 3085 v.AddArg(x) 3086 return true 3087 } 3088 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 3089 // cond: c==d 3090 // result: y 3091 for { 3092 d := v.AuxInt 3093 _ = v.Args[1] 3094 x := v.Args[0] 3095 y := v.Args[1] 3096 if y.Op != OpARM64SRLconst { 3097 break 3098 } 3099 c := y.AuxInt 3100 if x != y.Args[0] { 3101 break 3102 } 3103 if !(c == d) { 3104 break 3105 } 3106 v.reset(OpCopy) 3107 v.Type = y.Type 3108 v.AddArg(y) 3109 return true 3110 } 3111 return false 3112 } 3113 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 3114 // match: (BIC x (MOVDconst [c])) 3115 // cond: 3116 // result: (ANDconst [^c] x) 3117 for { 3118 _ = v.Args[1] 3119 x := v.Args[0] 3120 v_1 := v.Args[1] 3121 if v_1.Op != OpARM64MOVDconst { 3122 break 3123 } 3124 c := v_1.AuxInt 3125 v.reset(OpARM64ANDconst) 3126 v.AuxInt = ^c 3127 v.AddArg(x) 3128 return true 3129 } 3130 // match: (BIC x x) 3131 // cond: 3132 // result: (MOVDconst [0]) 3133 for { 3134 _ = v.Args[1] 3135 x := v.Args[0] 3136 if x != v.Args[1] { 3137 break 3138 } 3139 v.reset(OpARM64MOVDconst) 3140 v.AuxInt = 0 3141 return true 3142 } 3143 // match: (BIC x0 x1:(SLLconst [c] y)) 3144 // cond: clobberIfDead(x1) 3145 // result: (BICshiftLL x0 y [c]) 3146 for { 3147 _ = v.Args[1] 3148 x0 := v.Args[0] 3149 x1 := v.Args[1] 3150 if x1.Op != OpARM64SLLconst { 3151 break 3152 } 3153 c := x1.AuxInt 3154 y := x1.Args[0] 3155 if !(clobberIfDead(x1)) { 3156 break 3157 } 3158 v.reset(OpARM64BICshiftLL) 3159 v.AuxInt = c 3160 v.AddArg(x0) 3161 v.AddArg(y) 3162 return true 3163 } 3164 // match: (BIC x0 x1:(SRLconst [c] y)) 3165 // cond: clobberIfDead(x1) 3166 // result: (BICshiftRL x0 y [c]) 3167 for { 3168 _ = v.Args[1] 3169 x0 := v.Args[0] 3170 x1 := v.Args[1] 3171 if x1.Op != OpARM64SRLconst { 3172 break 3173 } 3174 c := x1.AuxInt 3175 y := x1.Args[0] 3176 if !(clobberIfDead(x1)) { 3177 break 3178 
} 3179 v.reset(OpARM64BICshiftRL) 3180 v.AuxInt = c 3181 v.AddArg(x0) 3182 v.AddArg(y) 3183 return true 3184 } 3185 // match: (BIC x0 x1:(SRAconst [c] y)) 3186 // cond: clobberIfDead(x1) 3187 // result: (BICshiftRA x0 y [c]) 3188 for { 3189 _ = v.Args[1] 3190 x0 := v.Args[0] 3191 x1 := v.Args[1] 3192 if x1.Op != OpARM64SRAconst { 3193 break 3194 } 3195 c := x1.AuxInt 3196 y := x1.Args[0] 3197 if !(clobberIfDead(x1)) { 3198 break 3199 } 3200 v.reset(OpARM64BICshiftRA) 3201 v.AuxInt = c 3202 v.AddArg(x0) 3203 v.AddArg(y) 3204 return true 3205 } 3206 return false 3207 } 3208 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 3209 // match: (BICshiftLL x (MOVDconst [c]) [d]) 3210 // cond: 3211 // result: (ANDconst x [^int64(uint64(c)<<uint64(d))]) 3212 for { 3213 d := v.AuxInt 3214 _ = v.Args[1] 3215 x := v.Args[0] 3216 v_1 := v.Args[1] 3217 if v_1.Op != OpARM64MOVDconst { 3218 break 3219 } 3220 c := v_1.AuxInt 3221 v.reset(OpARM64ANDconst) 3222 v.AuxInt = ^int64(uint64(c) << uint64(d)) 3223 v.AddArg(x) 3224 return true 3225 } 3226 // match: (BICshiftLL x (SLLconst x [c]) [d]) 3227 // cond: c==d 3228 // result: (MOVDconst [0]) 3229 for { 3230 d := v.AuxInt 3231 _ = v.Args[1] 3232 x := v.Args[0] 3233 v_1 := v.Args[1] 3234 if v_1.Op != OpARM64SLLconst { 3235 break 3236 } 3237 c := v_1.AuxInt 3238 if x != v_1.Args[0] { 3239 break 3240 } 3241 if !(c == d) { 3242 break 3243 } 3244 v.reset(OpARM64MOVDconst) 3245 v.AuxInt = 0 3246 return true 3247 } 3248 return false 3249 } 3250 func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool { 3251 // match: (BICshiftRA x (MOVDconst [c]) [d]) 3252 // cond: 3253 // result: (ANDconst x [^(c>>uint64(d))]) 3254 for { 3255 d := v.AuxInt 3256 _ = v.Args[1] 3257 x := v.Args[0] 3258 v_1 := v.Args[1] 3259 if v_1.Op != OpARM64MOVDconst { 3260 break 3261 } 3262 c := v_1.AuxInt 3263 v.reset(OpARM64ANDconst) 3264 v.AuxInt = ^(c >> uint64(d)) 3265 v.AddArg(x) 3266 return true 3267 } 3268 // match: (BICshiftRA x (SRAconst x [c]) [d]) 
3269 // cond: c==d 3270 // result: (MOVDconst [0]) 3271 for { 3272 d := v.AuxInt 3273 _ = v.Args[1] 3274 x := v.Args[0] 3275 v_1 := v.Args[1] 3276 if v_1.Op != OpARM64SRAconst { 3277 break 3278 } 3279 c := v_1.AuxInt 3280 if x != v_1.Args[0] { 3281 break 3282 } 3283 if !(c == d) { 3284 break 3285 } 3286 v.reset(OpARM64MOVDconst) 3287 v.AuxInt = 0 3288 return true 3289 } 3290 return false 3291 } 3292 func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool { 3293 // match: (BICshiftRL x (MOVDconst [c]) [d]) 3294 // cond: 3295 // result: (ANDconst x [^int64(uint64(c)>>uint64(d))]) 3296 for { 3297 d := v.AuxInt 3298 _ = v.Args[1] 3299 x := v.Args[0] 3300 v_1 := v.Args[1] 3301 if v_1.Op != OpARM64MOVDconst { 3302 break 3303 } 3304 c := v_1.AuxInt 3305 v.reset(OpARM64ANDconst) 3306 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 3307 v.AddArg(x) 3308 return true 3309 } 3310 // match: (BICshiftRL x (SRLconst x [c]) [d]) 3311 // cond: c==d 3312 // result: (MOVDconst [0]) 3313 for { 3314 d := v.AuxInt 3315 _ = v.Args[1] 3316 x := v.Args[0] 3317 v_1 := v.Args[1] 3318 if v_1.Op != OpARM64SRLconst { 3319 break 3320 } 3321 c := v_1.AuxInt 3322 if x != v_1.Args[0] { 3323 break 3324 } 3325 if !(c == d) { 3326 break 3327 } 3328 v.reset(OpARM64MOVDconst) 3329 v.AuxInt = 0 3330 return true 3331 } 3332 return false 3333 } 3334 func rewriteValueARM64_OpARM64CMN_0(v *Value) bool { 3335 // match: (CMN x (MOVDconst [c])) 3336 // cond: 3337 // result: (CMNconst [c] x) 3338 for { 3339 _ = v.Args[1] 3340 x := v.Args[0] 3341 v_1 := v.Args[1] 3342 if v_1.Op != OpARM64MOVDconst { 3343 break 3344 } 3345 c := v_1.AuxInt 3346 v.reset(OpARM64CMNconst) 3347 v.AuxInt = c 3348 v.AddArg(x) 3349 return true 3350 } 3351 // match: (CMN (MOVDconst [c]) x) 3352 // cond: 3353 // result: (CMNconst [c] x) 3354 for { 3355 _ = v.Args[1] 3356 v_0 := v.Args[0] 3357 if v_0.Op != OpARM64MOVDconst { 3358 break 3359 } 3360 c := v_0.AuxInt 3361 x := v.Args[1] 3362 v.reset(OpARM64CMNconst) 3363 v.AuxInt = c 3364 
v.AddArg(x) 3365 return true 3366 } 3367 // match: (CMN x0 x1:(SLLconst [c] y)) 3368 // cond: clobberIfDead(x1) 3369 // result: (CMNshiftLL x0 y [c]) 3370 for { 3371 _ = v.Args[1] 3372 x0 := v.Args[0] 3373 x1 := v.Args[1] 3374 if x1.Op != OpARM64SLLconst { 3375 break 3376 } 3377 c := x1.AuxInt 3378 y := x1.Args[0] 3379 if !(clobberIfDead(x1)) { 3380 break 3381 } 3382 v.reset(OpARM64CMNshiftLL) 3383 v.AuxInt = c 3384 v.AddArg(x0) 3385 v.AddArg(y) 3386 return true 3387 } 3388 // match: (CMN x1:(SLLconst [c] y) x0) 3389 // cond: clobberIfDead(x1) 3390 // result: (CMNshiftLL x0 y [c]) 3391 for { 3392 _ = v.Args[1] 3393 x1 := v.Args[0] 3394 if x1.Op != OpARM64SLLconst { 3395 break 3396 } 3397 c := x1.AuxInt 3398 y := x1.Args[0] 3399 x0 := v.Args[1] 3400 if !(clobberIfDead(x1)) { 3401 break 3402 } 3403 v.reset(OpARM64CMNshiftLL) 3404 v.AuxInt = c 3405 v.AddArg(x0) 3406 v.AddArg(y) 3407 return true 3408 } 3409 // match: (CMN x0 x1:(SRLconst [c] y)) 3410 // cond: clobberIfDead(x1) 3411 // result: (CMNshiftRL x0 y [c]) 3412 for { 3413 _ = v.Args[1] 3414 x0 := v.Args[0] 3415 x1 := v.Args[1] 3416 if x1.Op != OpARM64SRLconst { 3417 break 3418 } 3419 c := x1.AuxInt 3420 y := x1.Args[0] 3421 if !(clobberIfDead(x1)) { 3422 break 3423 } 3424 v.reset(OpARM64CMNshiftRL) 3425 v.AuxInt = c 3426 v.AddArg(x0) 3427 v.AddArg(y) 3428 return true 3429 } 3430 // match: (CMN x1:(SRLconst [c] y) x0) 3431 // cond: clobberIfDead(x1) 3432 // result: (CMNshiftRL x0 y [c]) 3433 for { 3434 _ = v.Args[1] 3435 x1 := v.Args[0] 3436 if x1.Op != OpARM64SRLconst { 3437 break 3438 } 3439 c := x1.AuxInt 3440 y := x1.Args[0] 3441 x0 := v.Args[1] 3442 if !(clobberIfDead(x1)) { 3443 break 3444 } 3445 v.reset(OpARM64CMNshiftRL) 3446 v.AuxInt = c 3447 v.AddArg(x0) 3448 v.AddArg(y) 3449 return true 3450 } 3451 // match: (CMN x0 x1:(SRAconst [c] y)) 3452 // cond: clobberIfDead(x1) 3453 // result: (CMNshiftRA x0 y [c]) 3454 for { 3455 _ = v.Args[1] 3456 x0 := v.Args[0] 3457 x1 := v.Args[1] 3458 if x1.Op != 
OpARM64SRAconst { 3459 break 3460 } 3461 c := x1.AuxInt 3462 y := x1.Args[0] 3463 if !(clobberIfDead(x1)) { 3464 break 3465 } 3466 v.reset(OpARM64CMNshiftRA) 3467 v.AuxInt = c 3468 v.AddArg(x0) 3469 v.AddArg(y) 3470 return true 3471 } 3472 // match: (CMN x1:(SRAconst [c] y) x0) 3473 // cond: clobberIfDead(x1) 3474 // result: (CMNshiftRA x0 y [c]) 3475 for { 3476 _ = v.Args[1] 3477 x1 := v.Args[0] 3478 if x1.Op != OpARM64SRAconst { 3479 break 3480 } 3481 c := x1.AuxInt 3482 y := x1.Args[0] 3483 x0 := v.Args[1] 3484 if !(clobberIfDead(x1)) { 3485 break 3486 } 3487 v.reset(OpARM64CMNshiftRA) 3488 v.AuxInt = c 3489 v.AddArg(x0) 3490 v.AddArg(y) 3491 return true 3492 } 3493 return false 3494 }
// rewriteValueARM64_OpARM64CMNW_0 rewrites a 32-bit compare-negative:
// when either operand is a MOVDconst, the comparison is folded into a
// CMNWconst carrying that constant, with the other operand as the sole
// argument. Reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64CMNW_0(v *Value) bool {
	// (CMNW x (MOVDconst [c])) -> (CMNWconst [c] x)
	_ = v.Args[1]
	if cst := v.Args[1]; cst.Op == OpARM64MOVDconst {
		x := v.Args[0]
		c := cst.AuxInt
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// (CMNW (MOVDconst [c]) x) -> (CMNWconst [c] x)
	// CMN is commutative, so the constant folds from either side.
	if cst := v.Args[0]; cst.Op == OpARM64MOVDconst {
		c := cst.AuxInt
		x := v.Args[1]
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
3530 func rewriteValueARM64_OpARM64CMNWconst_0(v *Value) bool { 3531 // match: (CMNWconst (MOVDconst [x]) [y]) 3532 // cond: int32(x)==int32(-y) 3533 // result: (FlagEQ) 3534 for { 3535 y := v.AuxInt 3536 v_0 := v.Args[0] 3537 if v_0.Op != OpARM64MOVDconst { 3538 break 3539 } 3540 x := v_0.AuxInt 3541 if !(int32(x) == int32(-y)) { 3542 break 3543 } 3544 v.reset(OpARM64FlagEQ) 3545 return true 3546 } 3547 // match: (CMNWconst (MOVDconst [x]) [y]) 3548 // cond: int32(x)<int32(-y) && uint32(x)<uint32(-y) 3549 // result: (FlagLT_ULT) 3550 for { 3551 y := v.AuxInt
3552 v_0 := v.Args[0] 3553 if v_0.Op != OpARM64MOVDconst { 3554 break 3555 } 3556 x := v_0.AuxInt 3557 if !(int32(x) < int32(-y) && uint32(x) < uint32(-y)) { 3558 break 3559 } 3560 v.reset(OpARM64FlagLT_ULT) 3561 return true 3562 } 3563 // match: (CMNWconst (MOVDconst [x]) [y]) 3564 // cond: int32(x)<int32(-y) && uint32(x)>uint32(-y) 3565 // result: (FlagLT_UGT) 3566 for { 3567 y := v.AuxInt 3568 v_0 := v.Args[0] 3569 if v_0.Op != OpARM64MOVDconst { 3570 break 3571 } 3572 x := v_0.AuxInt 3573 if !(int32(x) < int32(-y) && uint32(x) > uint32(-y)) { 3574 break 3575 } 3576 v.reset(OpARM64FlagLT_UGT) 3577 return true 3578 } 3579 // match: (CMNWconst (MOVDconst [x]) [y]) 3580 // cond: int32(x)>int32(-y) && uint32(x)<uint32(-y) 3581 // result: (FlagGT_ULT) 3582 for { 3583 y := v.AuxInt 3584 v_0 := v.Args[0] 3585 if v_0.Op != OpARM64MOVDconst { 3586 break 3587 } 3588 x := v_0.AuxInt 3589 if !(int32(x) > int32(-y) && uint32(x) < uint32(-y)) { 3590 break 3591 } 3592 v.reset(OpARM64FlagGT_ULT) 3593 return true 3594 } 3595 // match: (CMNWconst (MOVDconst [x]) [y]) 3596 // cond: int32(x)>int32(-y) && uint32(x)>uint32(-y) 3597 // result: (FlagGT_UGT) 3598 for { 3599 y := v.AuxInt 3600 v_0 := v.Args[0] 3601 if v_0.Op != OpARM64MOVDconst { 3602 break 3603 } 3604 x := v_0.AuxInt 3605 if !(int32(x) > int32(-y) && uint32(x) > uint32(-y)) { 3606 break 3607 } 3608 v.reset(OpARM64FlagGT_UGT) 3609 return true 3610 } 3611 return false 3612 } 3613 func rewriteValueARM64_OpARM64CMNconst_0(v *Value) bool { 3614 // match: (CMNconst (MOVDconst [x]) [y]) 3615 // cond: int64(x)==int64(-y) 3616 // result: (FlagEQ) 3617 for { 3618 y := v.AuxInt 3619 v_0 := v.Args[0] 3620 if v_0.Op != OpARM64MOVDconst { 3621 break 3622 } 3623 x := v_0.AuxInt 3624 if !(int64(x) == int64(-y)) { 3625 break 3626 } 3627 v.reset(OpARM64FlagEQ) 3628 return true 3629 } 3630 // match: (CMNconst (MOVDconst [x]) [y]) 3631 // cond: int64(x)<int64(-y) && uint64(x)<uint64(-y) 3632 // result: (FlagLT_ULT) 3633 for { 3634 y := 
v.AuxInt 3635 v_0 := v.Args[0] 3636 if v_0.Op != OpARM64MOVDconst { 3637 break 3638 } 3639 x := v_0.AuxInt 3640 if !(int64(x) < int64(-y) && uint64(x) < uint64(-y)) { 3641 break 3642 } 3643 v.reset(OpARM64FlagLT_ULT) 3644 return true 3645 } 3646 // match: (CMNconst (MOVDconst [x]) [y]) 3647 // cond: int64(x)<int64(-y) && uint64(x)>uint64(-y) 3648 // result: (FlagLT_UGT) 3649 for { 3650 y := v.AuxInt 3651 v_0 := v.Args[0] 3652 if v_0.Op != OpARM64MOVDconst { 3653 break 3654 } 3655 x := v_0.AuxInt 3656 if !(int64(x) < int64(-y) && uint64(x) > uint64(-y)) { 3657 break 3658 } 3659 v.reset(OpARM64FlagLT_UGT) 3660 return true 3661 } 3662 // match: (CMNconst (MOVDconst [x]) [y]) 3663 // cond: int64(x)>int64(-y) && uint64(x)<uint64(-y) 3664 // result: (FlagGT_ULT) 3665 for { 3666 y := v.AuxInt 3667 v_0 := v.Args[0] 3668 if v_0.Op != OpARM64MOVDconst { 3669 break 3670 } 3671 x := v_0.AuxInt 3672 if !(int64(x) > int64(-y) && uint64(x) < uint64(-y)) { 3673 break 3674 } 3675 v.reset(OpARM64FlagGT_ULT) 3676 return true 3677 } 3678 // match: (CMNconst (MOVDconst [x]) [y]) 3679 // cond: int64(x)>int64(-y) && uint64(x)>uint64(-y) 3680 // result: (FlagGT_UGT) 3681 for { 3682 y := v.AuxInt 3683 v_0 := v.Args[0] 3684 if v_0.Op != OpARM64MOVDconst { 3685 break 3686 } 3687 x := v_0.AuxInt 3688 if !(int64(x) > int64(-y) && uint64(x) > uint64(-y)) { 3689 break 3690 } 3691 v.reset(OpARM64FlagGT_UGT) 3692 return true 3693 } 3694 return false 3695 } 3696 func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool { 3697 b := v.Block 3698 _ = b 3699 // match: (CMNshiftLL (MOVDconst [c]) x [d]) 3700 // cond: 3701 // result: (CMNconst [c] (SLLconst <x.Type> x [d])) 3702 for { 3703 d := v.AuxInt 3704 _ = v.Args[1] 3705 v_0 := v.Args[0] 3706 if v_0.Op != OpARM64MOVDconst { 3707 break 3708 } 3709 c := v_0.AuxInt 3710 x := v.Args[1] 3711 v.reset(OpARM64CMNconst) 3712 v.AuxInt = c 3713 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 3714 v0.AuxInt = d 3715 v0.AddArg(x) 3716 v.AddArg(v0) 3717 return 
true 3718 } 3719 // match: (CMNshiftLL x (MOVDconst [c]) [d]) 3720 // cond: 3721 // result: (CMNconst x [int64(uint64(c)<<uint64(d))]) 3722 for { 3723 d := v.AuxInt 3724 _ = v.Args[1] 3725 x := v.Args[0] 3726 v_1 := v.Args[1] 3727 if v_1.Op != OpARM64MOVDconst { 3728 break 3729 } 3730 c := v_1.AuxInt 3731 v.reset(OpARM64CMNconst) 3732 v.AuxInt = int64(uint64(c) << uint64(d)) 3733 v.AddArg(x) 3734 return true 3735 } 3736 return false 3737 }
// rewriteValueARM64_OpARM64CMNshiftRA_0 simplifies CMNshiftRA (compare-negative
// whose second operand is arithmetically right-shifted by AuxInt) when one
// operand is a MOVDconst. Reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// (CMNshiftRA (MOVDconst [c]) x [d]) -> (CMNconst [c] (SRAconst <x.Type> x [d]))
	// The constant moves into CMNconst; the shift is materialized explicitly.
	_ = v.Args[1]
	if cst := v.Args[0]; cst.Op == OpARM64MOVDconst {
		d := v.AuxInt
		c := cst.AuxInt
		x := v.Args[1]
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		shift := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		shift.AuxInt = d
		shift.AddArg(x)
		v.AddArg(shift)
		return true
	}
	// (CMNshiftRA x (MOVDconst [c]) [d]) -> (CMNconst x [c>>uint64(d)])
	// Go's >> on the signed c is an arithmetic shift, matching SRAconst.
	if cst := v.Args[1]; cst.Op == OpARM64MOVDconst {
		d := v.AuxInt
		x := v.Args[0]
		c := cst.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
3780 func rewriteValueARM64_OpARM64CMNshiftRL_0(v *Value) bool { 3781 b := v.Block 3782 _ = b 3783 // match: (CMNshiftRL (MOVDconst [c]) x [d]) 3784 // cond: 3785 // result: (CMNconst [c] (SRLconst <x.Type> x [d])) 3786 for { 3787 d := v.AuxInt 3788 _ = v.Args[1] 3789 v_0 := v.Args[0] 3790 if v_0.Op != OpARM64MOVDconst { 3791 break 3792 } 3793 c := v_0.AuxInt 3794 x := v.Args[1] 3795 v.reset(OpARM64CMNconst) 3796 v.AuxInt = c 3797 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3798 v0.AuxInt = d 3799 v0.AddArg(x) 3800 v.AddArg(v0) 3801 return true 3802 } 3803 // match:
(CMNshiftRL x (MOVDconst [c]) [d]) 3804 // cond: 3805 // result: (CMNconst x [int64(uint64(c)>>uint64(d))]) 3806 for { 3807 d := v.AuxInt 3808 _ = v.Args[1] 3809 x := v.Args[0] 3810 v_1 := v.Args[1] 3811 if v_1.Op != OpARM64MOVDconst { 3812 break 3813 } 3814 c := v_1.AuxInt 3815 v.reset(OpARM64CMNconst) 3816 v.AuxInt = int64(uint64(c) >> uint64(d)) 3817 v.AddArg(x) 3818 return true 3819 } 3820 return false 3821 } 3822 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 3823 b := v.Block 3824 _ = b 3825 // match: (CMP x (MOVDconst [c])) 3826 // cond: 3827 // result: (CMPconst [c] x) 3828 for { 3829 _ = v.Args[1] 3830 x := v.Args[0] 3831 v_1 := v.Args[1] 3832 if v_1.Op != OpARM64MOVDconst { 3833 break 3834 } 3835 c := v_1.AuxInt 3836 v.reset(OpARM64CMPconst) 3837 v.AuxInt = c 3838 v.AddArg(x) 3839 return true 3840 } 3841 // match: (CMP (MOVDconst [c]) x) 3842 // cond: 3843 // result: (InvertFlags (CMPconst [c] x)) 3844 for { 3845 _ = v.Args[1] 3846 v_0 := v.Args[0] 3847 if v_0.Op != OpARM64MOVDconst { 3848 break 3849 } 3850 c := v_0.AuxInt 3851 x := v.Args[1] 3852 v.reset(OpARM64InvertFlags) 3853 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3854 v0.AuxInt = c 3855 v0.AddArg(x) 3856 v.AddArg(v0) 3857 return true 3858 } 3859 // match: (CMP x0 x1:(SLLconst [c] y)) 3860 // cond: clobberIfDead(x1) 3861 // result: (CMPshiftLL x0 y [c]) 3862 for { 3863 _ = v.Args[1] 3864 x0 := v.Args[0] 3865 x1 := v.Args[1] 3866 if x1.Op != OpARM64SLLconst { 3867 break 3868 } 3869 c := x1.AuxInt 3870 y := x1.Args[0] 3871 if !(clobberIfDead(x1)) { 3872 break 3873 } 3874 v.reset(OpARM64CMPshiftLL) 3875 v.AuxInt = c 3876 v.AddArg(x0) 3877 v.AddArg(y) 3878 return true 3879 } 3880 // match: (CMP x0:(SLLconst [c] y) x1) 3881 // cond: clobberIfDead(x0) 3882 // result: (InvertFlags (CMPshiftLL x1 y [c])) 3883 for { 3884 _ = v.Args[1] 3885 x0 := v.Args[0] 3886 if x0.Op != OpARM64SLLconst { 3887 break 3888 } 3889 c := x0.AuxInt 3890 y := x0.Args[0] 3891 x1 := v.Args[1] 3892 if 
!(clobberIfDead(x0)) { 3893 break 3894 } 3895 v.reset(OpARM64InvertFlags) 3896 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 3897 v0.AuxInt = c 3898 v0.AddArg(x1) 3899 v0.AddArg(y) 3900 v.AddArg(v0) 3901 return true 3902 } 3903 // match: (CMP x0 x1:(SRLconst [c] y)) 3904 // cond: clobberIfDead(x1) 3905 // result: (CMPshiftRL x0 y [c]) 3906 for { 3907 _ = v.Args[1] 3908 x0 := v.Args[0] 3909 x1 := v.Args[1] 3910 if x1.Op != OpARM64SRLconst { 3911 break 3912 } 3913 c := x1.AuxInt 3914 y := x1.Args[0] 3915 if !(clobberIfDead(x1)) { 3916 break 3917 } 3918 v.reset(OpARM64CMPshiftRL) 3919 v.AuxInt = c 3920 v.AddArg(x0) 3921 v.AddArg(y) 3922 return true 3923 } 3924 // match: (CMP x0:(SRLconst [c] y) x1) 3925 // cond: clobberIfDead(x0) 3926 // result: (InvertFlags (CMPshiftRL x1 y [c])) 3927 for { 3928 _ = v.Args[1] 3929 x0 := v.Args[0] 3930 if x0.Op != OpARM64SRLconst { 3931 break 3932 } 3933 c := x0.AuxInt 3934 y := x0.Args[0] 3935 x1 := v.Args[1] 3936 if !(clobberIfDead(x0)) { 3937 break 3938 } 3939 v.reset(OpARM64InvertFlags) 3940 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 3941 v0.AuxInt = c 3942 v0.AddArg(x1) 3943 v0.AddArg(y) 3944 v.AddArg(v0) 3945 return true 3946 } 3947 // match: (CMP x0 x1:(SRAconst [c] y)) 3948 // cond: clobberIfDead(x1) 3949 // result: (CMPshiftRA x0 y [c]) 3950 for { 3951 _ = v.Args[1] 3952 x0 := v.Args[0] 3953 x1 := v.Args[1] 3954 if x1.Op != OpARM64SRAconst { 3955 break 3956 } 3957 c := x1.AuxInt 3958 y := x1.Args[0] 3959 if !(clobberIfDead(x1)) { 3960 break 3961 } 3962 v.reset(OpARM64CMPshiftRA) 3963 v.AuxInt = c 3964 v.AddArg(x0) 3965 v.AddArg(y) 3966 return true 3967 } 3968 // match: (CMP x0:(SRAconst [c] y) x1) 3969 // cond: clobberIfDead(x0) 3970 // result: (InvertFlags (CMPshiftRA x1 y [c])) 3971 for { 3972 _ = v.Args[1] 3973 x0 := v.Args[0] 3974 if x0.Op != OpARM64SRAconst { 3975 break 3976 } 3977 c := x0.AuxInt 3978 y := x0.Args[0] 3979 x1 := v.Args[1] 3980 if !(clobberIfDead(x0)) { 3981 break 3982 } 
3983 v.reset(OpARM64InvertFlags) 3984 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 3985 v0.AuxInt = c 3986 v0.AddArg(x1) 3987 v0.AddArg(y) 3988 v.AddArg(v0) 3989 return true 3990 } 3991 return false 3992 }
// rewriteValueARM64_OpARM64CMPW_0 folds a constant operand of a 32-bit
// compare into CMPWconst. CMP is not commutative: when the constant is on
// the left the operands swap, so the result is wrapped in InvertFlags to
// compensate. Reports whether a rewrite was applied.
func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
	b := v.Block
	_ = b
	// (CMPW x (MOVDconst [c])) -> (CMPWconst [int64(int32(c))] x)
	_ = v.Args[1]
	if cst := v.Args[1]; cst.Op == OpARM64MOVDconst {
		x := v.Args[0]
		c := cst.AuxInt
		v.reset(OpARM64CMPWconst)
		v.AuxInt = int64(int32(c)) // only the low 32 bits are significant
		v.AddArg(x)
		return true
	}
	// (CMPW (MOVDconst [c]) x) -> (InvertFlags (CMPWconst [int64(int32(c))] x))
	if cst := v.Args[0]; cst.Op == OpARM64MOVDconst {
		c := cst.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		cmp := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		cmp.AuxInt = int64(int32(c))
		cmp.AddArg(x)
		v.AddArg(cmp)
		return true
	}
	return false
}
4032 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool { 4033 // match: (CMPWconst (MOVDconst [x]) [y]) 4034 // cond: int32(x)==int32(y) 4035 // result: (FlagEQ) 4036 for { 4037 y := v.AuxInt 4038 v_0 := v.Args[0] 4039 if v_0.Op != OpARM64MOVDconst { 4040 break 4041 } 4042 x := v_0.AuxInt 4043 if !(int32(x) == int32(y)) { 4044 break 4045 } 4046 v.reset(OpARM64FlagEQ) 4047 return true 4048 } 4049 // match: (CMPWconst (MOVDconst [x]) [y]) 4050 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 4051 // result: (FlagLT_ULT) 4052 for { 4053 y := v.AuxInt 4054 v_0 := v.Args[0] 4055 if v_0.Op != OpARM64MOVDconst { 4056 break 4057 } 4058 x := v_0.AuxInt 4059 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 4060 break 4061 } 4062 v.reset(OpARM64FlagLT_ULT) 4063 return true 4064 } 4065 // match: (CMPWconst (MOVDconst [x]) [y]) 4066 // cond:
// rewriteValueARM64_OpARM64CMPconst_0 replaces a CMPconst whose flag result
// is statically known with the matching constant-flags pseudo-op
// (FlagEQ, FlagLT_ULT, FlagLT_UGT, FlagGT_ULT, FlagGT_UGT). The Flag* ops
// encode both the signed (LT/GT) and unsigned (ULT/UGT) comparison outcome.
// Rules are attempted in order; each for-block is one match attempt that
// breaks to the next rule on failure. Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool {
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x<y && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x < y && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x<y && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x < y && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x>y && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x > y && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x>y && uint64(x)>uint64(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x > y && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// A zero/unsigned-extended value is bounded by its source width, so a
	// comparison against a larger constant is decidable without knowing x.
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// AND with a non-negative mask m bounds the result to [0, m].
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// A logical right shift by c leaves at most 64-c significant bits, so
	// the result is < 1<<(64-c) <= n.
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
[c]) [d]) 4331 // cond: 4332 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 4333 for { 4334 d := v.AuxInt 4335 _ = v.Args[1] 4336 x := v.Args[0] 4337 v_1 := v.Args[1] 4338 if v_1.Op != OpARM64MOVDconst { 4339 break 4340 } 4341 c := v_1.AuxInt 4342 v.reset(OpARM64CMPconst) 4343 v.AuxInt = int64(uint64(c) << uint64(d)) 4344 v.AddArg(x) 4345 return true 4346 } 4347 return false 4348 } 4349 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 4350 b := v.Block 4351 _ = b 4352 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 4353 // cond: 4354 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 4355 for { 4356 d := v.AuxInt 4357 _ = v.Args[1] 4358 v_0 := v.Args[0] 4359 if v_0.Op != OpARM64MOVDconst { 4360 break 4361 } 4362 c := v_0.AuxInt 4363 x := v.Args[1] 4364 v.reset(OpARM64InvertFlags) 4365 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 4366 v0.AuxInt = c 4367 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 4368 v1.AuxInt = d 4369 v1.AddArg(x) 4370 v0.AddArg(v1) 4371 v.AddArg(v0) 4372 return true 4373 } 4374 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 4375 // cond: 4376 // result: (CMPconst x [c>>uint64(d)]) 4377 for { 4378 d := v.AuxInt 4379 _ = v.Args[1] 4380 x := v.Args[0] 4381 v_1 := v.Args[1] 4382 if v_1.Op != OpARM64MOVDconst { 4383 break 4384 } 4385 c := v_1.AuxInt 4386 v.reset(OpARM64CMPconst) 4387 v.AuxInt = c >> uint64(d) 4388 v.AddArg(x) 4389 return true 4390 } 4391 return false 4392 } 4393 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 4394 b := v.Block 4395 _ = b 4396 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 4397 // cond: 4398 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 4399 for { 4400 d := v.AuxInt 4401 _ = v.Args[1] 4402 v_0 := v.Args[0] 4403 if v_0.Op != OpARM64MOVDconst { 4404 break 4405 } 4406 c := v_0.AuxInt 4407 x := v.Args[1] 4408 v.reset(OpARM64InvertFlags) 4409 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 4410 v0.AuxInt = c 4411 v1 := 
// rewriteValueARM64_OpARM64CSEL_0 simplifies conditional selects: folding a
// zero operand into CSEL0, unwrapping InvertFlags by inverting the condition
// in Aux, resolving selects whose flags are statically known, and collapsing
// a CSEL driven by a boolean's (CMPWconst [0] bool) back onto the flag value
// that produced the boolean. Rules are tried in order; reports whether one
// fired.
func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool {
	// match: (CSEL {cc} x (MOVDconst [0]) flag)
	// cond:
	// result: (CSEL0 {cc} x flag)
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		flag := v.Args[2]
		v.reset(OpARM64CSEL0)
		v.Aux = cc
		v.AddArg(x)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL {cc} (MOVDconst [0]) y flag)
	// cond:
	// result: (CSEL0 {arm64Negate(cc.(Op))} y flag)
	for {
		cc := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		y := v.Args[1]
		flag := v.Args[2]
		v.reset(OpARM64CSEL0)
		// Zero moved from the "true" slot to CSEL0's implicit "false" slot,
		// so the condition must be negated.
		v.Aux = arm64Negate(cc.(Op))
		v.AddArg(y)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL {cc} x y (InvertFlags cmp))
	// cond:
	// result: (CSEL {arm64Invert(cc.(Op))} x y cmp)
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSEL)
		v.Aux = arm64Invert(cc.(Op))
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(cmp)
		return true
	}
	// ccARM64Eval evaluates the condition against statically-known flags:
	// >0 means the condition is known true, <0 known false, 0 unknown.
	// match: (CSEL {cc} x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		flag := v.Args[2]
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSEL {cc} _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: y
	for {
		cc := v.Aux
		_ = v.Args[2]
		y := v.Args[1]
		flag := v.Args[2]
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// "bool" below is a materialized 0/1 condition value; flagArg recovers
	// the flag-producing comparison behind it (nil when there isn't one).
	// match: (CSEL {cc} x y (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil
	// result: (CSEL {bool.Op} x y flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64CMPWconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		bool := v_2.Args[0]
		if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.Aux = bool.Op
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(flagArg(bool))
		return true
	}
	// match: (CSEL {cc} x y (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil
	// result: (CSEL {arm64Negate(bool.Op)} x y flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64CMPWconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		bool := v_2.Args[0]
		if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		// "bool == 0" selects on the boolean being false, hence negate.
		v.Aux = arm64Negate(bool.Op)
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(flagArg(bool))
		return true
	}
	return false
}
4589 // result: (CSEL0 {arm64Invert(cc.(Op))} x cmp) 4590 for { 4591 cc := v.Aux 4592 _ = v.Args[1] 4593 x := v.Args[0] 4594 v_1 := v.Args[1] 4595 if v_1.Op != OpARM64InvertFlags { 4596 break 4597 } 4598 cmp := v_1.Args[0] 4599 v.reset(OpARM64CSEL0) 4600 v.Aux = arm64Invert(cc.(Op)) 4601 v.AddArg(x) 4602 v.AddArg(cmp) 4603 return true 4604 } 4605 // match: (CSEL0 {cc} x flag) 4606 // cond: ccARM64Eval(cc, flag) > 0 4607 // result: x 4608 for { 4609 cc := v.Aux 4610 _ = v.Args[1] 4611 x := v.Args[0] 4612 flag := v.Args[1] 4613 if !(ccARM64Eval(cc, flag) > 0) { 4614 break 4615 } 4616 v.reset(OpCopy) 4617 v.Type = x.Type 4618 v.AddArg(x) 4619 return true 4620 } 4621 // match: (CSEL0 {cc} _ flag) 4622 // cond: ccARM64Eval(cc, flag) < 0 4623 // result: (MOVDconst [0]) 4624 for { 4625 cc := v.Aux 4626 _ = v.Args[1] 4627 flag := v.Args[1] 4628 if !(ccARM64Eval(cc, flag) < 0) { 4629 break 4630 } 4631 v.reset(OpARM64MOVDconst) 4632 v.AuxInt = 0 4633 return true 4634 } 4635 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 4636 // cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil 4637 // result: (CSEL0 {bool.Op} x flagArg(bool)) 4638 for { 4639 cc := v.Aux 4640 _ = v.Args[1] 4641 x := v.Args[0] 4642 v_1 := v.Args[1] 4643 if v_1.Op != OpARM64CMPWconst { 4644 break 4645 } 4646 if v_1.AuxInt != 0 { 4647 break 4648 } 4649 bool := v_1.Args[0] 4650 if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) { 4651 break 4652 } 4653 v.reset(OpARM64CSEL0) 4654 v.Aux = bool.Op 4655 v.AddArg(x) 4656 v.AddArg(flagArg(bool)) 4657 return true 4658 } 4659 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 4660 // cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil 4661 // result: (CSEL0 {arm64Negate(bool.Op)} x flagArg(bool)) 4662 for { 4663 cc := v.Aux 4664 _ = v.Args[1] 4665 x := v.Args[0] 4666 v_1 := v.Args[1] 4667 if v_1.Op != OpARM64CMPWconst { 4668 break 4669 } 4670 if v_1.AuxInt != 0 { 4671 break 4672 } 4673 bool := v_1.Args[0] 4674 if !(cc.(Op) == OpARM64Equal && flagArg(bool) != 
nil) { 4675 break 4676 } 4677 v.reset(OpARM64CSEL0) 4678 v.Aux = arm64Negate(bool.Op) 4679 v.AddArg(x) 4680 v.AddArg(flagArg(bool)) 4681 return true 4682 } 4683 return false 4684 } 4685 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 4686 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 4687 // cond: 4688 // result: (MOVDconst [c/d]) 4689 for { 4690 _ = v.Args[1] 4691 v_0 := v.Args[0] 4692 if v_0.Op != OpARM64MOVDconst { 4693 break 4694 } 4695 c := v_0.AuxInt 4696 v_1 := v.Args[1] 4697 if v_1.Op != OpARM64MOVDconst { 4698 break 4699 } 4700 d := v_1.AuxInt 4701 v.reset(OpARM64MOVDconst) 4702 v.AuxInt = c / d 4703 return true 4704 } 4705 return false 4706 } 4707 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 4708 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 4709 // cond: 4710 // result: (MOVDconst [int64(int32(c)/int32(d))]) 4711 for { 4712 _ = v.Args[1] 4713 v_0 := v.Args[0] 4714 if v_0.Op != OpARM64MOVDconst { 4715 break 4716 } 4717 c := v_0.AuxInt 4718 v_1 := v.Args[1] 4719 if v_1.Op != OpARM64MOVDconst { 4720 break 4721 } 4722 d := v_1.AuxInt 4723 v.reset(OpARM64MOVDconst) 4724 v.AuxInt = int64(int32(c) / int32(d)) 4725 return true 4726 } 4727 return false 4728 } 4729 func rewriteValueARM64_OpARM64EON_0(v *Value) bool { 4730 // match: (EON x (MOVDconst [c])) 4731 // cond: 4732 // result: (XORconst [^c] x) 4733 for { 4734 _ = v.Args[1] 4735 x := v.Args[0] 4736 v_1 := v.Args[1] 4737 if v_1.Op != OpARM64MOVDconst { 4738 break 4739 } 4740 c := v_1.AuxInt 4741 v.reset(OpARM64XORconst) 4742 v.AuxInt = ^c 4743 v.AddArg(x) 4744 return true 4745 } 4746 // match: (EON x x) 4747 // cond: 4748 // result: (MOVDconst [-1]) 4749 for { 4750 _ = v.Args[1] 4751 x := v.Args[0] 4752 if x != v.Args[1] { 4753 break 4754 } 4755 v.reset(OpARM64MOVDconst) 4756 v.AuxInt = -1 4757 return true 4758 } 4759 // match: (EON x0 x1:(SLLconst [c] y)) 4760 // cond: clobberIfDead(x1) 4761 // result: (EONshiftLL x0 y [c]) 4762 for { 4763 _ = v.Args[1] 4764 x0 := v.Args[0] 4765 x1 := 
v.Args[1] 4766 if x1.Op != OpARM64SLLconst { 4767 break 4768 } 4769 c := x1.AuxInt 4770 y := x1.Args[0] 4771 if !(clobberIfDead(x1)) { 4772 break 4773 } 4774 v.reset(OpARM64EONshiftLL) 4775 v.AuxInt = c 4776 v.AddArg(x0) 4777 v.AddArg(y) 4778 return true 4779 } 4780 // match: (EON x0 x1:(SRLconst [c] y)) 4781 // cond: clobberIfDead(x1) 4782 // result: (EONshiftRL x0 y [c]) 4783 for { 4784 _ = v.Args[1] 4785 x0 := v.Args[0] 4786 x1 := v.Args[1] 4787 if x1.Op != OpARM64SRLconst { 4788 break 4789 } 4790 c := x1.AuxInt 4791 y := x1.Args[0] 4792 if !(clobberIfDead(x1)) { 4793 break 4794 } 4795 v.reset(OpARM64EONshiftRL) 4796 v.AuxInt = c 4797 v.AddArg(x0) 4798 v.AddArg(y) 4799 return true 4800 } 4801 // match: (EON x0 x1:(SRAconst [c] y)) 4802 // cond: clobberIfDead(x1) 4803 // result: (EONshiftRA x0 y [c]) 4804 for { 4805 _ = v.Args[1] 4806 x0 := v.Args[0] 4807 x1 := v.Args[1] 4808 if x1.Op != OpARM64SRAconst { 4809 break 4810 } 4811 c := x1.AuxInt 4812 y := x1.Args[0] 4813 if !(clobberIfDead(x1)) { 4814 break 4815 } 4816 v.reset(OpARM64EONshiftRA) 4817 v.AuxInt = c 4818 v.AddArg(x0) 4819 v.AddArg(y) 4820 return true 4821 } 4822 return false 4823 } 4824 func rewriteValueARM64_OpARM64EONshiftLL_0(v *Value) bool { 4825 // match: (EONshiftLL x (MOVDconst [c]) [d]) 4826 // cond: 4827 // result: (XORconst x [^int64(uint64(c)<<uint64(d))]) 4828 for { 4829 d := v.AuxInt 4830 _ = v.Args[1] 4831 x := v.Args[0] 4832 v_1 := v.Args[1] 4833 if v_1.Op != OpARM64MOVDconst { 4834 break 4835 } 4836 c := v_1.AuxInt 4837 v.reset(OpARM64XORconst) 4838 v.AuxInt = ^int64(uint64(c) << uint64(d)) 4839 v.AddArg(x) 4840 return true 4841 } 4842 // match: (EONshiftLL x (SLLconst x [c]) [d]) 4843 // cond: c==d 4844 // result: (MOVDconst [-1]) 4845 for { 4846 d := v.AuxInt 4847 _ = v.Args[1] 4848 x := v.Args[0] 4849 v_1 := v.Args[1] 4850 if v_1.Op != OpARM64SLLconst { 4851 break 4852 } 4853 c := v_1.AuxInt 4854 if x != v_1.Args[0] { 4855 break 4856 } 4857 if !(c == d) { 4858 break 4859 } 4860 
v.reset(OpARM64MOVDconst) 4861 v.AuxInt = -1 4862 return true 4863 } 4864 return false 4865 } 4866 func rewriteValueARM64_OpARM64EONshiftRA_0(v *Value) bool { 4867 // match: (EONshiftRA x (MOVDconst [c]) [d]) 4868 // cond: 4869 // result: (XORconst x [^(c>>uint64(d))]) 4870 for { 4871 d := v.AuxInt 4872 _ = v.Args[1] 4873 x := v.Args[0] 4874 v_1 := v.Args[1] 4875 if v_1.Op != OpARM64MOVDconst { 4876 break 4877 } 4878 c := v_1.AuxInt 4879 v.reset(OpARM64XORconst) 4880 v.AuxInt = ^(c >> uint64(d)) 4881 v.AddArg(x) 4882 return true 4883 } 4884 // match: (EONshiftRA x (SRAconst x [c]) [d]) 4885 // cond: c==d 4886 // result: (MOVDconst [-1]) 4887 for { 4888 d := v.AuxInt 4889 _ = v.Args[1] 4890 x := v.Args[0] 4891 v_1 := v.Args[1] 4892 if v_1.Op != OpARM64SRAconst { 4893 break 4894 } 4895 c := v_1.AuxInt 4896 if x != v_1.Args[0] { 4897 break 4898 } 4899 if !(c == d) { 4900 break 4901 } 4902 v.reset(OpARM64MOVDconst) 4903 v.AuxInt = -1 4904 return true 4905 } 4906 return false 4907 } 4908 func rewriteValueARM64_OpARM64EONshiftRL_0(v *Value) bool { 4909 // match: (EONshiftRL x (MOVDconst [c]) [d]) 4910 // cond: 4911 // result: (XORconst x [^int64(uint64(c)>>uint64(d))]) 4912 for { 4913 d := v.AuxInt 4914 _ = v.Args[1] 4915 x := v.Args[0] 4916 v_1 := v.Args[1] 4917 if v_1.Op != OpARM64MOVDconst { 4918 break 4919 } 4920 c := v_1.AuxInt 4921 v.reset(OpARM64XORconst) 4922 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 4923 v.AddArg(x) 4924 return true 4925 } 4926 // match: (EONshiftRL x (SRLconst x [c]) [d]) 4927 // cond: c==d 4928 // result: (MOVDconst [-1]) 4929 for { 4930 d := v.AuxInt 4931 _ = v.Args[1] 4932 x := v.Args[0] 4933 v_1 := v.Args[1] 4934 if v_1.Op != OpARM64SRLconst { 4935 break 4936 } 4937 c := v_1.AuxInt 4938 if x != v_1.Args[0] { 4939 break 4940 } 4941 if !(c == d) { 4942 break 4943 } 4944 v.reset(OpARM64MOVDconst) 4945 v.AuxInt = -1 4946 return true 4947 } 4948 return false 4949 } 4950 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 4951 // match: (Equal 
(FlagEQ)) 4952 // cond: 4953 // result: (MOVDconst [1]) 4954 for { 4955 v_0 := v.Args[0] 4956 if v_0.Op != OpARM64FlagEQ { 4957 break 4958 } 4959 v.reset(OpARM64MOVDconst) 4960 v.AuxInt = 1 4961 return true 4962 } 4963 // match: (Equal (FlagLT_ULT)) 4964 // cond: 4965 // result: (MOVDconst [0]) 4966 for { 4967 v_0 := v.Args[0] 4968 if v_0.Op != OpARM64FlagLT_ULT { 4969 break 4970 } 4971 v.reset(OpARM64MOVDconst) 4972 v.AuxInt = 0 4973 return true 4974 } 4975 // match: (Equal (FlagLT_UGT)) 4976 // cond: 4977 // result: (MOVDconst [0]) 4978 for { 4979 v_0 := v.Args[0] 4980 if v_0.Op != OpARM64FlagLT_UGT { 4981 break 4982 } 4983 v.reset(OpARM64MOVDconst) 4984 v.AuxInt = 0 4985 return true 4986 } 4987 // match: (Equal (FlagGT_ULT)) 4988 // cond: 4989 // result: (MOVDconst [0]) 4990 for { 4991 v_0 := v.Args[0] 4992 if v_0.Op != OpARM64FlagGT_ULT { 4993 break 4994 } 4995 v.reset(OpARM64MOVDconst) 4996 v.AuxInt = 0 4997 return true 4998 } 4999 // match: (Equal (FlagGT_UGT)) 5000 // cond: 5001 // result: (MOVDconst [0]) 5002 for { 5003 v_0 := v.Args[0] 5004 if v_0.Op != OpARM64FlagGT_UGT { 5005 break 5006 } 5007 v.reset(OpARM64MOVDconst) 5008 v.AuxInt = 0 5009 return true 5010 } 5011 // match: (Equal (InvertFlags x)) 5012 // cond: 5013 // result: (Equal x) 5014 for { 5015 v_0 := v.Args[0] 5016 if v_0.Op != OpARM64InvertFlags { 5017 break 5018 } 5019 x := v_0.Args[0] 5020 v.reset(OpARM64Equal) 5021 v.AddArg(x) 5022 return true 5023 } 5024 return false 5025 } 5026 func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool { 5027 // match: (FADDD a (FMULD x y)) 5028 // cond: 5029 // result: (FMADDD a x y) 5030 for { 5031 _ = v.Args[1] 5032 a := v.Args[0] 5033 v_1 := v.Args[1] 5034 if v_1.Op != OpARM64FMULD { 5035 break 5036 } 5037 _ = v_1.Args[1] 5038 x := v_1.Args[0] 5039 y := v_1.Args[1] 5040 v.reset(OpARM64FMADDD) 5041 v.AddArg(a) 5042 v.AddArg(x) 5043 v.AddArg(y) 5044 return true 5045 } 5046 // match: (FADDD (FMULD x y) a) 5047 // cond: 5048 // result: (FMADDD a x y) 5049 for { 
5050 _ = v.Args[1] 5051 v_0 := v.Args[0] 5052 if v_0.Op != OpARM64FMULD { 5053 break 5054 } 5055 _ = v_0.Args[1] 5056 x := v_0.Args[0] 5057 y := v_0.Args[1] 5058 a := v.Args[1] 5059 v.reset(OpARM64FMADDD) 5060 v.AddArg(a) 5061 v.AddArg(x) 5062 v.AddArg(y) 5063 return true 5064 } 5065 // match: (FADDD a (FNMULD x y)) 5066 // cond: 5067 // result: (FMSUBD a x y) 5068 for { 5069 _ = v.Args[1] 5070 a := v.Args[0] 5071 v_1 := v.Args[1] 5072 if v_1.Op != OpARM64FNMULD { 5073 break 5074 } 5075 _ = v_1.Args[1] 5076 x := v_1.Args[0] 5077 y := v_1.Args[1] 5078 v.reset(OpARM64FMSUBD) 5079 v.AddArg(a) 5080 v.AddArg(x) 5081 v.AddArg(y) 5082 return true 5083 } 5084 // match: (FADDD (FNMULD x y) a) 5085 // cond: 5086 // result: (FMSUBD a x y) 5087 for { 5088 _ = v.Args[1] 5089 v_0 := v.Args[0] 5090 if v_0.Op != OpARM64FNMULD { 5091 break 5092 } 5093 _ = v_0.Args[1] 5094 x := v_0.Args[0] 5095 y := v_0.Args[1] 5096 a := v.Args[1] 5097 v.reset(OpARM64FMSUBD) 5098 v.AddArg(a) 5099 v.AddArg(x) 5100 v.AddArg(y) 5101 return true 5102 } 5103 return false 5104 } 5105 func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool { 5106 // match: (FADDS a (FMULS x y)) 5107 // cond: 5108 // result: (FMADDS a x y) 5109 for { 5110 _ = v.Args[1] 5111 a := v.Args[0] 5112 v_1 := v.Args[1] 5113 if v_1.Op != OpARM64FMULS { 5114 break 5115 } 5116 _ = v_1.Args[1] 5117 x := v_1.Args[0] 5118 y := v_1.Args[1] 5119 v.reset(OpARM64FMADDS) 5120 v.AddArg(a) 5121 v.AddArg(x) 5122 v.AddArg(y) 5123 return true 5124 } 5125 // match: (FADDS (FMULS x y) a) 5126 // cond: 5127 // result: (FMADDS a x y) 5128 for { 5129 _ = v.Args[1] 5130 v_0 := v.Args[0] 5131 if v_0.Op != OpARM64FMULS { 5132 break 5133 } 5134 _ = v_0.Args[1] 5135 x := v_0.Args[0] 5136 y := v_0.Args[1] 5137 a := v.Args[1] 5138 v.reset(OpARM64FMADDS) 5139 v.AddArg(a) 5140 v.AddArg(x) 5141 v.AddArg(y) 5142 return true 5143 } 5144 // match: (FADDS a (FNMULS x y)) 5145 // cond: 5146 // result: (FMSUBS a x y) 5147 for { 5148 _ = v.Args[1] 5149 a := v.Args[0] 5150 
v_1 := v.Args[1] 5151 if v_1.Op != OpARM64FNMULS { 5152 break 5153 } 5154 _ = v_1.Args[1] 5155 x := v_1.Args[0] 5156 y := v_1.Args[1] 5157 v.reset(OpARM64FMSUBS) 5158 v.AddArg(a) 5159 v.AddArg(x) 5160 v.AddArg(y) 5161 return true 5162 } 5163 // match: (FADDS (FNMULS x y) a) 5164 // cond: 5165 // result: (FMSUBS a x y) 5166 for { 5167 _ = v.Args[1] 5168 v_0 := v.Args[0] 5169 if v_0.Op != OpARM64FNMULS { 5170 break 5171 } 5172 _ = v_0.Args[1] 5173 x := v_0.Args[0] 5174 y := v_0.Args[1] 5175 a := v.Args[1] 5176 v.reset(OpARM64FMSUBS) 5177 v.AddArg(a) 5178 v.AddArg(x) 5179 v.AddArg(y) 5180 return true 5181 } 5182 return false 5183 } 5184 func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool { 5185 b := v.Block 5186 _ = b 5187 // match: (FMOVDfpgp <t> (Arg [off] {sym})) 5188 // cond: 5189 // result: @b.Func.Entry (Arg <t> [off] {sym}) 5190 for { 5191 t := v.Type 5192 v_0 := v.Args[0] 5193 if v_0.Op != OpArg { 5194 break 5195 } 5196 off := v_0.AuxInt 5197 sym := v_0.Aux 5198 b = b.Func.Entry 5199 v0 := b.NewValue0(v.Pos, OpArg, t) 5200 v.reset(OpCopy) 5201 v.AddArg(v0) 5202 v0.AuxInt = off 5203 v0.Aux = sym 5204 return true 5205 } 5206 return false 5207 } 5208 func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool { 5209 b := v.Block 5210 _ = b 5211 // match: (FMOVDgpfp <t> (Arg [off] {sym})) 5212 // cond: 5213 // result: @b.Func.Entry (Arg <t> [off] {sym}) 5214 for { 5215 t := v.Type 5216 v_0 := v.Args[0] 5217 if v_0.Op != OpArg { 5218 break 5219 } 5220 off := v_0.AuxInt 5221 sym := v_0.Aux 5222 b = b.Func.Entry 5223 v0 := b.NewValue0(v.Pos, OpArg, t) 5224 v.reset(OpCopy) 5225 v.AddArg(v0) 5226 v0.AuxInt = off 5227 v0.Aux = sym 5228 return true 5229 } 5230 return false 5231 } 5232 func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 5233 b := v.Block 5234 _ = b 5235 config := b.Func.Config 5236 _ = config 5237 // match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _)) 5238 // cond: 5239 // result: (FMOVDgpfp val) 5240 for { 5241 off := v.AuxInt 
5242 sym := v.Aux 5243 _ = v.Args[1] 5244 ptr := v.Args[0] 5245 v_1 := v.Args[1] 5246 if v_1.Op != OpARM64MOVDstore { 5247 break 5248 } 5249 if v_1.AuxInt != off { 5250 break 5251 } 5252 if v_1.Aux != sym { 5253 break 5254 } 5255 _ = v_1.Args[2] 5256 if ptr != v_1.Args[0] { 5257 break 5258 } 5259 val := v_1.Args[1] 5260 v.reset(OpARM64FMOVDgpfp) 5261 v.AddArg(val) 5262 return true 5263 } 5264 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 5265 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5266 // result: (FMOVDload [off1+off2] {sym} ptr mem) 5267 for { 5268 off1 := v.AuxInt 5269 sym := v.Aux 5270 _ = v.Args[1] 5271 v_0 := v.Args[0] 5272 if v_0.Op != OpARM64ADDconst { 5273 break 5274 } 5275 off2 := v_0.AuxInt 5276 ptr := v_0.Args[0] 5277 mem := v.Args[1] 5278 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5279 break 5280 } 5281 v.reset(OpARM64FMOVDload) 5282 v.AuxInt = off1 + off2 5283 v.Aux = sym 5284 v.AddArg(ptr) 5285 v.AddArg(mem) 5286 return true 5287 } 5288 // match: (FMOVDload [off] {sym} (ADD ptr idx) mem) 5289 // cond: off == 0 && sym == nil 5290 // result: (FMOVDloadidx ptr idx mem) 5291 for { 5292 off := v.AuxInt 5293 sym := v.Aux 5294 _ = v.Args[1] 5295 v_0 := v.Args[0] 5296 if v_0.Op != OpARM64ADD { 5297 break 5298 } 5299 _ = v_0.Args[1] 5300 ptr := v_0.Args[0] 5301 idx := v_0.Args[1] 5302 mem := v.Args[1] 5303 if !(off == 0 && sym == nil) { 5304 break 5305 } 5306 v.reset(OpARM64FMOVDloadidx) 5307 v.AddArg(ptr) 5308 v.AddArg(idx) 5309 v.AddArg(mem) 5310 return true 5311 } 5312 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5313 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5314 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5315 for { 5316 off1 := v.AuxInt 5317 sym1 := v.Aux 5318 _ = v.Args[1] 5319 v_0 := v.Args[0] 5320 if v_0.Op != OpARM64MOVDaddr { 5321 break 5322 } 5323 off2 := 
v_0.AuxInt 5324 sym2 := v_0.Aux 5325 ptr := v_0.Args[0] 5326 mem := v.Args[1] 5327 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5328 break 5329 } 5330 v.reset(OpARM64FMOVDload) 5331 v.AuxInt = off1 + off2 5332 v.Aux = mergeSym(sym1, sym2) 5333 v.AddArg(ptr) 5334 v.AddArg(mem) 5335 return true 5336 } 5337 return false 5338 } 5339 func rewriteValueARM64_OpARM64FMOVDloadidx_0(v *Value) bool { 5340 // match: (FMOVDloadidx ptr (MOVDconst [c]) mem) 5341 // cond: 5342 // result: (FMOVDload [c] ptr mem) 5343 for { 5344 _ = v.Args[2] 5345 ptr := v.Args[0] 5346 v_1 := v.Args[1] 5347 if v_1.Op != OpARM64MOVDconst { 5348 break 5349 } 5350 c := v_1.AuxInt 5351 mem := v.Args[2] 5352 v.reset(OpARM64FMOVDload) 5353 v.AuxInt = c 5354 v.AddArg(ptr) 5355 v.AddArg(mem) 5356 return true 5357 } 5358 // match: (FMOVDloadidx (MOVDconst [c]) ptr mem) 5359 // cond: 5360 // result: (FMOVDload [c] ptr mem) 5361 for { 5362 _ = v.Args[2] 5363 v_0 := v.Args[0] 5364 if v_0.Op != OpARM64MOVDconst { 5365 break 5366 } 5367 c := v_0.AuxInt 5368 ptr := v.Args[1] 5369 mem := v.Args[2] 5370 v.reset(OpARM64FMOVDload) 5371 v.AuxInt = c 5372 v.AddArg(ptr) 5373 v.AddArg(mem) 5374 return true 5375 } 5376 return false 5377 } 5378 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 5379 b := v.Block 5380 _ = b 5381 config := b.Func.Config 5382 _ = config 5383 // match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem) 5384 // cond: 5385 // result: (MOVDstore [off] {sym} ptr val mem) 5386 for { 5387 off := v.AuxInt 5388 sym := v.Aux 5389 _ = v.Args[2] 5390 ptr := v.Args[0] 5391 v_1 := v.Args[1] 5392 if v_1.Op != OpARM64FMOVDgpfp { 5393 break 5394 } 5395 val := v_1.Args[0] 5396 mem := v.Args[2] 5397 v.reset(OpARM64MOVDstore) 5398 v.AuxInt = off 5399 v.Aux = sym 5400 v.AddArg(ptr) 5401 v.AddArg(val) 5402 v.AddArg(mem) 5403 return true 5404 } 5405 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5406 // cond: is32Bit(off1+off2) && 
(ptr.Op != OpSB || !config.ctxt.Flag_shared) 5407 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 5408 for { 5409 off1 := v.AuxInt 5410 sym := v.Aux 5411 _ = v.Args[2] 5412 v_0 := v.Args[0] 5413 if v_0.Op != OpARM64ADDconst { 5414 break 5415 } 5416 off2 := v_0.AuxInt 5417 ptr := v_0.Args[0] 5418 val := v.Args[1] 5419 mem := v.Args[2] 5420 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5421 break 5422 } 5423 v.reset(OpARM64FMOVDstore) 5424 v.AuxInt = off1 + off2 5425 v.Aux = sym 5426 v.AddArg(ptr) 5427 v.AddArg(val) 5428 v.AddArg(mem) 5429 return true 5430 } 5431 // match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem) 5432 // cond: off == 0 && sym == nil 5433 // result: (FMOVDstoreidx ptr idx val mem) 5434 for { 5435 off := v.AuxInt 5436 sym := v.Aux 5437 _ = v.Args[2] 5438 v_0 := v.Args[0] 5439 if v_0.Op != OpARM64ADD { 5440 break 5441 } 5442 _ = v_0.Args[1] 5443 ptr := v_0.Args[0] 5444 idx := v_0.Args[1] 5445 val := v.Args[1] 5446 mem := v.Args[2] 5447 if !(off == 0 && sym == nil) { 5448 break 5449 } 5450 v.reset(OpARM64FMOVDstoreidx) 5451 v.AddArg(ptr) 5452 v.AddArg(idx) 5453 v.AddArg(val) 5454 v.AddArg(mem) 5455 return true 5456 } 5457 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5458 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5459 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5460 for { 5461 off1 := v.AuxInt 5462 sym1 := v.Aux 5463 _ = v.Args[2] 5464 v_0 := v.Args[0] 5465 if v_0.Op != OpARM64MOVDaddr { 5466 break 5467 } 5468 off2 := v_0.AuxInt 5469 sym2 := v_0.Aux 5470 ptr := v_0.Args[0] 5471 val := v.Args[1] 5472 mem := v.Args[2] 5473 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5474 break 5475 } 5476 v.reset(OpARM64FMOVDstore) 5477 v.AuxInt = off1 + off2 5478 v.Aux = mergeSym(sym1, sym2) 5479 v.AddArg(ptr) 5480 v.AddArg(val) 5481 v.AddArg(mem) 5482 return 
true 5483 } 5484 return false 5485 } 5486 func rewriteValueARM64_OpARM64FMOVDstoreidx_0(v *Value) bool { 5487 // match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem) 5488 // cond: 5489 // result: (FMOVDstore [c] ptr val mem) 5490 for { 5491 _ = v.Args[3] 5492 ptr := v.Args[0] 5493 v_1 := v.Args[1] 5494 if v_1.Op != OpARM64MOVDconst { 5495 break 5496 } 5497 c := v_1.AuxInt 5498 val := v.Args[2] 5499 mem := v.Args[3] 5500 v.reset(OpARM64FMOVDstore) 5501 v.AuxInt = c 5502 v.AddArg(ptr) 5503 v.AddArg(val) 5504 v.AddArg(mem) 5505 return true 5506 } 5507 // match: (FMOVDstoreidx (MOVDconst [c]) idx val mem) 5508 // cond: 5509 // result: (FMOVDstore [c] idx val mem) 5510 for { 5511 _ = v.Args[3] 5512 v_0 := v.Args[0] 5513 if v_0.Op != OpARM64MOVDconst { 5514 break 5515 } 5516 c := v_0.AuxInt 5517 idx := v.Args[1] 5518 val := v.Args[2] 5519 mem := v.Args[3] 5520 v.reset(OpARM64FMOVDstore) 5521 v.AuxInt = c 5522 v.AddArg(idx) 5523 v.AddArg(val) 5524 v.AddArg(mem) 5525 return true 5526 } 5527 return false 5528 } 5529 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 5530 b := v.Block 5531 _ = b 5532 config := b.Func.Config 5533 _ = config 5534 // match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _)) 5535 // cond: 5536 // result: (FMOVSgpfp val) 5537 for { 5538 off := v.AuxInt 5539 sym := v.Aux 5540 _ = v.Args[1] 5541 ptr := v.Args[0] 5542 v_1 := v.Args[1] 5543 if v_1.Op != OpARM64MOVWstore { 5544 break 5545 } 5546 if v_1.AuxInt != off { 5547 break 5548 } 5549 if v_1.Aux != sym { 5550 break 5551 } 5552 _ = v_1.Args[2] 5553 if ptr != v_1.Args[0] { 5554 break 5555 } 5556 val := v_1.Args[1] 5557 v.reset(OpARM64FMOVSgpfp) 5558 v.AddArg(val) 5559 return true 5560 } 5561 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 5562 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5563 // result: (FMOVSload [off1+off2] {sym} ptr mem) 5564 for { 5565 off1 := v.AuxInt 5566 sym := v.Aux 5567 _ = v.Args[1] 5568 v_0 := v.Args[0] 5569 
if v_0.Op != OpARM64ADDconst { 5570 break 5571 } 5572 off2 := v_0.AuxInt 5573 ptr := v_0.Args[0] 5574 mem := v.Args[1] 5575 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5576 break 5577 } 5578 v.reset(OpARM64FMOVSload) 5579 v.AuxInt = off1 + off2 5580 v.Aux = sym 5581 v.AddArg(ptr) 5582 v.AddArg(mem) 5583 return true 5584 } 5585 // match: (FMOVSload [off] {sym} (ADD ptr idx) mem) 5586 // cond: off == 0 && sym == nil 5587 // result: (FMOVSloadidx ptr idx mem) 5588 for { 5589 off := v.AuxInt 5590 sym := v.Aux 5591 _ = v.Args[1] 5592 v_0 := v.Args[0] 5593 if v_0.Op != OpARM64ADD { 5594 break 5595 } 5596 _ = v_0.Args[1] 5597 ptr := v_0.Args[0] 5598 idx := v_0.Args[1] 5599 mem := v.Args[1] 5600 if !(off == 0 && sym == nil) { 5601 break 5602 } 5603 v.reset(OpARM64FMOVSloadidx) 5604 v.AddArg(ptr) 5605 v.AddArg(idx) 5606 v.AddArg(mem) 5607 return true 5608 } 5609 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5610 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5611 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5612 for { 5613 off1 := v.AuxInt 5614 sym1 := v.Aux 5615 _ = v.Args[1] 5616 v_0 := v.Args[0] 5617 if v_0.Op != OpARM64MOVDaddr { 5618 break 5619 } 5620 off2 := v_0.AuxInt 5621 sym2 := v_0.Aux 5622 ptr := v_0.Args[0] 5623 mem := v.Args[1] 5624 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5625 break 5626 } 5627 v.reset(OpARM64FMOVSload) 5628 v.AuxInt = off1 + off2 5629 v.Aux = mergeSym(sym1, sym2) 5630 v.AddArg(ptr) 5631 v.AddArg(mem) 5632 return true 5633 } 5634 return false 5635 } 5636 func rewriteValueARM64_OpARM64FMOVSloadidx_0(v *Value) bool { 5637 // match: (FMOVSloadidx ptr (MOVDconst [c]) mem) 5638 // cond: 5639 // result: (FMOVSload [c] ptr mem) 5640 for { 5641 _ = v.Args[2] 5642 ptr := v.Args[0] 5643 v_1 := v.Args[1] 5644 if v_1.Op != OpARM64MOVDconst { 5645 break 5646 } 5647 c := 
v_1.AuxInt 5648 mem := v.Args[2] 5649 v.reset(OpARM64FMOVSload) 5650 v.AuxInt = c 5651 v.AddArg(ptr) 5652 v.AddArg(mem) 5653 return true 5654 } 5655 // match: (FMOVSloadidx (MOVDconst [c]) ptr mem) 5656 // cond: 5657 // result: (FMOVSload [c] ptr mem) 5658 for { 5659 _ = v.Args[2] 5660 v_0 := v.Args[0] 5661 if v_0.Op != OpARM64MOVDconst { 5662 break 5663 } 5664 c := v_0.AuxInt 5665 ptr := v.Args[1] 5666 mem := v.Args[2] 5667 v.reset(OpARM64FMOVSload) 5668 v.AuxInt = c 5669 v.AddArg(ptr) 5670 v.AddArg(mem) 5671 return true 5672 } 5673 return false 5674 } 5675 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 5676 b := v.Block 5677 _ = b 5678 config := b.Func.Config 5679 _ = config 5680 // match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem) 5681 // cond: 5682 // result: (MOVWstore [off] {sym} ptr val mem) 5683 for { 5684 off := v.AuxInt 5685 sym := v.Aux 5686 _ = v.Args[2] 5687 ptr := v.Args[0] 5688 v_1 := v.Args[1] 5689 if v_1.Op != OpARM64FMOVSgpfp { 5690 break 5691 } 5692 val := v_1.Args[0] 5693 mem := v.Args[2] 5694 v.reset(OpARM64MOVWstore) 5695 v.AuxInt = off 5696 v.Aux = sym 5697 v.AddArg(ptr) 5698 v.AddArg(val) 5699 v.AddArg(mem) 5700 return true 5701 } 5702 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5703 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5704 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 5705 for { 5706 off1 := v.AuxInt 5707 sym := v.Aux 5708 _ = v.Args[2] 5709 v_0 := v.Args[0] 5710 if v_0.Op != OpARM64ADDconst { 5711 break 5712 } 5713 off2 := v_0.AuxInt 5714 ptr := v_0.Args[0] 5715 val := v.Args[1] 5716 mem := v.Args[2] 5717 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5718 break 5719 } 5720 v.reset(OpARM64FMOVSstore) 5721 v.AuxInt = off1 + off2 5722 v.Aux = sym 5723 v.AddArg(ptr) 5724 v.AddArg(val) 5725 v.AddArg(mem) 5726 return true 5727 } 5728 // match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem) 5729 // cond: off == 0 && sym == nil 
5730 // result: (FMOVSstoreidx ptr idx val mem) 5731 for { 5732 off := v.AuxInt 5733 sym := v.Aux 5734 _ = v.Args[2] 5735 v_0 := v.Args[0] 5736 if v_0.Op != OpARM64ADD { 5737 break 5738 } 5739 _ = v_0.Args[1] 5740 ptr := v_0.Args[0] 5741 idx := v_0.Args[1] 5742 val := v.Args[1] 5743 mem := v.Args[2] 5744 if !(off == 0 && sym == nil) { 5745 break 5746 } 5747 v.reset(OpARM64FMOVSstoreidx) 5748 v.AddArg(ptr) 5749 v.AddArg(idx) 5750 v.AddArg(val) 5751 v.AddArg(mem) 5752 return true 5753 } 5754 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5755 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5756 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5757 for { 5758 off1 := v.AuxInt 5759 sym1 := v.Aux 5760 _ = v.Args[2] 5761 v_0 := v.Args[0] 5762 if v_0.Op != OpARM64MOVDaddr { 5763 break 5764 } 5765 off2 := v_0.AuxInt 5766 sym2 := v_0.Aux 5767 ptr := v_0.Args[0] 5768 val := v.Args[1] 5769 mem := v.Args[2] 5770 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5771 break 5772 } 5773 v.reset(OpARM64FMOVSstore) 5774 v.AuxInt = off1 + off2 5775 v.Aux = mergeSym(sym1, sym2) 5776 v.AddArg(ptr) 5777 v.AddArg(val) 5778 v.AddArg(mem) 5779 return true 5780 } 5781 return false 5782 } 5783 func rewriteValueARM64_OpARM64FMOVSstoreidx_0(v *Value) bool { 5784 // match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem) 5785 // cond: 5786 // result: (FMOVSstore [c] ptr val mem) 5787 for { 5788 _ = v.Args[3] 5789 ptr := v.Args[0] 5790 v_1 := v.Args[1] 5791 if v_1.Op != OpARM64MOVDconst { 5792 break 5793 } 5794 c := v_1.AuxInt 5795 val := v.Args[2] 5796 mem := v.Args[3] 5797 v.reset(OpARM64FMOVSstore) 5798 v.AuxInt = c 5799 v.AddArg(ptr) 5800 v.AddArg(val) 5801 v.AddArg(mem) 5802 return true 5803 } 5804 // match: (FMOVSstoreidx (MOVDconst [c]) idx val mem) 5805 // cond: 5806 // result: (FMOVSstore [c] idx val mem) 5807 for { 5808 _ = v.Args[3] 5809 
v_0 := v.Args[0] 5810 if v_0.Op != OpARM64MOVDconst { 5811 break 5812 } 5813 c := v_0.AuxInt 5814 idx := v.Args[1] 5815 val := v.Args[2] 5816 mem := v.Args[3] 5817 v.reset(OpARM64FMOVSstore) 5818 v.AuxInt = c 5819 v.AddArg(idx) 5820 v.AddArg(val) 5821 v.AddArg(mem) 5822 return true 5823 } 5824 return false 5825 }
// rewriteValueARM64_OpARM64FMULD_0 folds a float64 negate into a multiply:
//	(FMULD (FNEGD x) y) -> (FNMULD x y)
//	(FMULD y (FNEGD x)) -> (FNMULD x y)
// FMULD is commutative, so the FNEGD operand is looked for in either position
// (argument 0 first, matching the generated rule order).
func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool {
	_ = v.Args[1]
	for i := 0; i <= 1; i++ {
		neg := v.Args[i]
		if neg.Op != OpARM64FNEGD {
			continue
		}
		x := neg.Args[0]
		y := v.Args[1-i]
		v.reset(OpARM64FNMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMULS_0 is the float32 counterpart:
//	(FMULS (FNEGS x) y) -> (FNMULS x y)
//	(FMULS y (FNEGS x)) -> (FNMULS x y)
func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool {
	_ = v.Args[1]
	for i := 0; i <= 1; i++ {
		neg := v.Args[i]
		if neg.Op != OpARM64FNEGS {
			continue
		}
		x := neg.Args[0]
		y := v.Args[1-i]
		v.reset(OpARM64FNMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
5896 func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool { 5897 // match: (FNEGD (FMULD x y)) 5898 // cond: 5899 // result: (FNMULD x y) 5900 for { 5901 v_0 := v.Args[0] 5902 if v_0.Op != OpARM64FMULD { 5903 break 5904 } 5905 _ = v_0.Args[1] 5906 x := v_0.Args[0] 5907 y :=
v_0.Args[1] 5908 v.reset(OpARM64FNMULD) 5909 v.AddArg(x) 5910 v.AddArg(y) 5911 return true 5912 } 5913 // match: (FNEGD (FNMULD x y)) 5914 // cond: 5915 // result: (FMULD x y) 5916 for { 5917 v_0 := v.Args[0] 5918 if v_0.Op != OpARM64FNMULD { 5919 break 5920 } 5921 _ = v_0.Args[1] 5922 x := v_0.Args[0] 5923 y := v_0.Args[1] 5924 v.reset(OpARM64FMULD) 5925 v.AddArg(x) 5926 v.AddArg(y) 5927 return true 5928 } 5929 return false 5930 }
// rewriteValueARM64_OpARM64FNEGS_0 pushes a float32 negate into a multiply,
// toggling between the multiply and negated-multiply ops:
//	(FNEGS (FMULS x y))  -> (FNMULS x y)
//	(FNEGS (FNMULS x y)) -> (FMULS x y)
func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool {
	mul := v.Args[0]
	op := mul.Op
	if op != OpARM64FMULS && op != OpARM64FNMULS {
		return false
	}
	_ = mul.Args[1]
	x := mul.Args[0]
	y := mul.Args[1]
	// The two ops are each other's negation, so swap to the opposite one.
	if op == OpARM64FMULS {
		v.reset(OpARM64FNMULS)
	} else {
		v.reset(OpARM64FMULS)
	}
	v.AddArg(x)
	v.AddArg(y)
	return true
}

// rewriteValueARM64_OpARM64FNMULD_0 cancels a float64 negate against the
// negated multiply; FNMULD is commutative, so check both operand positions
// (argument 0 first, matching the generated rule order):
//	(FNMULD (FNEGD x) y) -> (FMULD x y)
//	(FNMULD y (FNEGD x)) -> (FMULD x y)
func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool {
	_ = v.Args[1]
	for i := 0; i <= 1; i++ {
		neg := v.Args[i]
		if neg.Op != OpARM64FNEGD {
			continue
		}
		x := neg.Args[0]
		y := v.Args[1-i]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
6001 func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool { 6002 // match: (FNMULS (FNEGS x) y) 6003 // cond: 6004 // result: (FMULS x y) 6005 for {
6006 _ = v.Args[1] 6007 v_0 := v.Args[0] 6008 if v_0.Op != OpARM64FNEGS { 6009 break 6010 } 6011 x := v_0.Args[0] 6012 y := v.Args[1] 6013 v.reset(OpARM64FMULS) 6014 v.AddArg(x) 6015 v.AddArg(y) 6016 return true 6017 } 6018 // match: (FNMULS y (FNEGS x)) 6019 // cond: 6020 // result: (FMULS x y) 6021 for { 6022 _ = v.Args[1] 6023 y := v.Args[0] 6024 v_1 := v.Args[1] 6025 if v_1.Op != OpARM64FNEGS { 6026 break 6027 } 6028 x := v_1.Args[0] 6029 v.reset(OpARM64FMULS) 6030 v.AddArg(x) 6031 v.AddArg(y) 6032 return true 6033 } 6034 return false 6035 } 6036 func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool { 6037 // match: (FSUBD a (FMULD x y)) 6038 // cond: 6039 // result: (FMSUBD a x y) 6040 for { 6041 _ = v.Args[1] 6042 a := v.Args[0] 6043 v_1 := v.Args[1] 6044 if v_1.Op != OpARM64FMULD { 6045 break 6046 } 6047 _ = v_1.Args[1] 6048 x := v_1.Args[0] 6049 y := v_1.Args[1] 6050 v.reset(OpARM64FMSUBD) 6051 v.AddArg(a) 6052 v.AddArg(x) 6053 v.AddArg(y) 6054 return true 6055 } 6056 // match: (FSUBD (FMULD x y) a) 6057 // cond: 6058 // result: (FNMSUBD a x y) 6059 for { 6060 _ = v.Args[1] 6061 v_0 := v.Args[0] 6062 if v_0.Op != OpARM64FMULD { 6063 break 6064 } 6065 _ = v_0.Args[1] 6066 x := v_0.Args[0] 6067 y := v_0.Args[1] 6068 a := v.Args[1] 6069 v.reset(OpARM64FNMSUBD) 6070 v.AddArg(a) 6071 v.AddArg(x) 6072 v.AddArg(y) 6073 return true 6074 } 6075 // match: (FSUBD a (FNMULD x y)) 6076 // cond: 6077 // result: (FMADDD a x y) 6078 for { 6079 _ = v.Args[1] 6080 a := v.Args[0] 6081 v_1 := v.Args[1] 6082 if v_1.Op != OpARM64FNMULD { 6083 break 6084 } 6085 _ = v_1.Args[1] 6086 x := v_1.Args[0] 6087 y := v_1.Args[1] 6088 v.reset(OpARM64FMADDD) 6089 v.AddArg(a) 6090 v.AddArg(x) 6091 v.AddArg(y) 6092 return true 6093 } 6094 // match: (FSUBD (FNMULD x y) a) 6095 // cond: 6096 // result: (FNMADDD a x y) 6097 for { 6098 _ = v.Args[1] 6099 v_0 := v.Args[0] 6100 if v_0.Op != OpARM64FNMULD { 6101 break 6102 } 6103 _ = v_0.Args[1] 6104 x := v_0.Args[0] 6105 y := v_0.Args[1] 6106 a := 
v.Args[1] 6107 v.reset(OpARM64FNMADDD) 6108 v.AddArg(a) 6109 v.AddArg(x) 6110 v.AddArg(y) 6111 return true 6112 } 6113 return false 6114 } 6115 func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool { 6116 // match: (FSUBS a (FMULS x y)) 6117 // cond: 6118 // result: (FMSUBS a x y) 6119 for { 6120 _ = v.Args[1] 6121 a := v.Args[0] 6122 v_1 := v.Args[1] 6123 if v_1.Op != OpARM64FMULS { 6124 break 6125 } 6126 _ = v_1.Args[1] 6127 x := v_1.Args[0] 6128 y := v_1.Args[1] 6129 v.reset(OpARM64FMSUBS) 6130 v.AddArg(a) 6131 v.AddArg(x) 6132 v.AddArg(y) 6133 return true 6134 } 6135 // match: (FSUBS (FMULS x y) a) 6136 // cond: 6137 // result: (FNMSUBS a x y) 6138 for { 6139 _ = v.Args[1] 6140 v_0 := v.Args[0] 6141 if v_0.Op != OpARM64FMULS { 6142 break 6143 } 6144 _ = v_0.Args[1] 6145 x := v_0.Args[0] 6146 y := v_0.Args[1] 6147 a := v.Args[1] 6148 v.reset(OpARM64FNMSUBS) 6149 v.AddArg(a) 6150 v.AddArg(x) 6151 v.AddArg(y) 6152 return true 6153 } 6154 // match: (FSUBS a (FNMULS x y)) 6155 // cond: 6156 // result: (FMADDS a x y) 6157 for { 6158 _ = v.Args[1] 6159 a := v.Args[0] 6160 v_1 := v.Args[1] 6161 if v_1.Op != OpARM64FNMULS { 6162 break 6163 } 6164 _ = v_1.Args[1] 6165 x := v_1.Args[0] 6166 y := v_1.Args[1] 6167 v.reset(OpARM64FMADDS) 6168 v.AddArg(a) 6169 v.AddArg(x) 6170 v.AddArg(y) 6171 return true 6172 } 6173 // match: (FSUBS (FNMULS x y) a) 6174 // cond: 6175 // result: (FNMADDS a x y) 6176 for { 6177 _ = v.Args[1] 6178 v_0 := v.Args[0] 6179 if v_0.Op != OpARM64FNMULS { 6180 break 6181 } 6182 _ = v_0.Args[1] 6183 x := v_0.Args[0] 6184 y := v_0.Args[1] 6185 a := v.Args[1] 6186 v.reset(OpARM64FNMADDS) 6187 v.AddArg(a) 6188 v.AddArg(x) 6189 v.AddArg(y) 6190 return true 6191 } 6192 return false 6193 } 6194 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 6195 // match: (GreaterEqual (FlagEQ)) 6196 // cond: 6197 // result: (MOVDconst [1]) 6198 for { 6199 v_0 := v.Args[0] 6200 if v_0.Op != OpARM64FlagEQ { 6201 break 6202 } 6203 v.reset(OpARM64MOVDconst) 6204 
v.AuxInt = 1 6205 return true 6206 } 6207 // match: (GreaterEqual (FlagLT_ULT)) 6208 // cond: 6209 // result: (MOVDconst [0]) 6210 for { 6211 v_0 := v.Args[0] 6212 if v_0.Op != OpARM64FlagLT_ULT { 6213 break 6214 } 6215 v.reset(OpARM64MOVDconst) 6216 v.AuxInt = 0 6217 return true 6218 } 6219 // match: (GreaterEqual (FlagLT_UGT)) 6220 // cond: 6221 // result: (MOVDconst [0]) 6222 for { 6223 v_0 := v.Args[0] 6224 if v_0.Op != OpARM64FlagLT_UGT { 6225 break 6226 } 6227 v.reset(OpARM64MOVDconst) 6228 v.AuxInt = 0 6229 return true 6230 } 6231 // match: (GreaterEqual (FlagGT_ULT)) 6232 // cond: 6233 // result: (MOVDconst [1]) 6234 for { 6235 v_0 := v.Args[0] 6236 if v_0.Op != OpARM64FlagGT_ULT { 6237 break 6238 } 6239 v.reset(OpARM64MOVDconst) 6240 v.AuxInt = 1 6241 return true 6242 } 6243 // match: (GreaterEqual (FlagGT_UGT)) 6244 // cond: 6245 // result: (MOVDconst [1]) 6246 for { 6247 v_0 := v.Args[0] 6248 if v_0.Op != OpARM64FlagGT_UGT { 6249 break 6250 } 6251 v.reset(OpARM64MOVDconst) 6252 v.AuxInt = 1 6253 return true 6254 } 6255 // match: (GreaterEqual (InvertFlags x)) 6256 // cond: 6257 // result: (LessEqual x) 6258 for { 6259 v_0 := v.Args[0] 6260 if v_0.Op != OpARM64InvertFlags { 6261 break 6262 } 6263 x := v_0.Args[0] 6264 v.reset(OpARM64LessEqual) 6265 v.AddArg(x) 6266 return true 6267 } 6268 return false 6269 } 6270 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 6271 // match: (GreaterEqualU (FlagEQ)) 6272 // cond: 6273 // result: (MOVDconst [1]) 6274 for { 6275 v_0 := v.Args[0] 6276 if v_0.Op != OpARM64FlagEQ { 6277 break 6278 } 6279 v.reset(OpARM64MOVDconst) 6280 v.AuxInt = 1 6281 return true 6282 } 6283 // match: (GreaterEqualU (FlagLT_ULT)) 6284 // cond: 6285 // result: (MOVDconst [0]) 6286 for { 6287 v_0 := v.Args[0] 6288 if v_0.Op != OpARM64FlagLT_ULT { 6289 break 6290 } 6291 v.reset(OpARM64MOVDconst) 6292 v.AuxInt = 0 6293 return true 6294 } 6295 // match: (GreaterEqualU (FlagLT_UGT)) 6296 // cond: 6297 // result: (MOVDconst [1]) 6298 
for { 6299 v_0 := v.Args[0] 6300 if v_0.Op != OpARM64FlagLT_UGT { 6301 break 6302 } 6303 v.reset(OpARM64MOVDconst) 6304 v.AuxInt = 1 6305 return true 6306 } 6307 // match: (GreaterEqualU (FlagGT_ULT)) 6308 // cond: 6309 // result: (MOVDconst [0]) 6310 for { 6311 v_0 := v.Args[0] 6312 if v_0.Op != OpARM64FlagGT_ULT { 6313 break 6314 } 6315 v.reset(OpARM64MOVDconst) 6316 v.AuxInt = 0 6317 return true 6318 } 6319 // match: (GreaterEqualU (FlagGT_UGT)) 6320 // cond: 6321 // result: (MOVDconst [1]) 6322 for { 6323 v_0 := v.Args[0] 6324 if v_0.Op != OpARM64FlagGT_UGT { 6325 break 6326 } 6327 v.reset(OpARM64MOVDconst) 6328 v.AuxInt = 1 6329 return true 6330 } 6331 // match: (GreaterEqualU (InvertFlags x)) 6332 // cond: 6333 // result: (LessEqualU x) 6334 for { 6335 v_0 := v.Args[0] 6336 if v_0.Op != OpARM64InvertFlags { 6337 break 6338 } 6339 x := v_0.Args[0] 6340 v.reset(OpARM64LessEqualU) 6341 v.AddArg(x) 6342 return true 6343 } 6344 return false 6345 } 6346 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 6347 // match: (GreaterThan (FlagEQ)) 6348 // cond: 6349 // result: (MOVDconst [0]) 6350 for { 6351 v_0 := v.Args[0] 6352 if v_0.Op != OpARM64FlagEQ { 6353 break 6354 } 6355 v.reset(OpARM64MOVDconst) 6356 v.AuxInt = 0 6357 return true 6358 } 6359 // match: (GreaterThan (FlagLT_ULT)) 6360 // cond: 6361 // result: (MOVDconst [0]) 6362 for { 6363 v_0 := v.Args[0] 6364 if v_0.Op != OpARM64FlagLT_ULT { 6365 break 6366 } 6367 v.reset(OpARM64MOVDconst) 6368 v.AuxInt = 0 6369 return true 6370 } 6371 // match: (GreaterThan (FlagLT_UGT)) 6372 // cond: 6373 // result: (MOVDconst [0]) 6374 for { 6375 v_0 := v.Args[0] 6376 if v_0.Op != OpARM64FlagLT_UGT { 6377 break 6378 } 6379 v.reset(OpARM64MOVDconst) 6380 v.AuxInt = 0 6381 return true 6382 } 6383 // match: (GreaterThan (FlagGT_ULT)) 6384 // cond: 6385 // result: (MOVDconst [1]) 6386 for { 6387 v_0 := v.Args[0] 6388 if v_0.Op != OpARM64FlagGT_ULT { 6389 break 6390 } 6391 v.reset(OpARM64MOVDconst) 6392 v.AuxInt = 1 
6393 return true 6394 } 6395 // match: (GreaterThan (FlagGT_UGT)) 6396 // cond: 6397 // result: (MOVDconst [1]) 6398 for { 6399 v_0 := v.Args[0] 6400 if v_0.Op != OpARM64FlagGT_UGT { 6401 break 6402 } 6403 v.reset(OpARM64MOVDconst) 6404 v.AuxInt = 1 6405 return true 6406 } 6407 // match: (GreaterThan (InvertFlags x)) 6408 // cond: 6409 // result: (LessThan x) 6410 for { 6411 v_0 := v.Args[0] 6412 if v_0.Op != OpARM64InvertFlags { 6413 break 6414 } 6415 x := v_0.Args[0] 6416 v.reset(OpARM64LessThan) 6417 v.AddArg(x) 6418 return true 6419 } 6420 return false 6421 } 6422 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 6423 // match: (GreaterThanU (FlagEQ)) 6424 // cond: 6425 // result: (MOVDconst [0]) 6426 for { 6427 v_0 := v.Args[0] 6428 if v_0.Op != OpARM64FlagEQ { 6429 break 6430 } 6431 v.reset(OpARM64MOVDconst) 6432 v.AuxInt = 0 6433 return true 6434 } 6435 // match: (GreaterThanU (FlagLT_ULT)) 6436 // cond: 6437 // result: (MOVDconst [0]) 6438 for { 6439 v_0 := v.Args[0] 6440 if v_0.Op != OpARM64FlagLT_ULT { 6441 break 6442 } 6443 v.reset(OpARM64MOVDconst) 6444 v.AuxInt = 0 6445 return true 6446 } 6447 // match: (GreaterThanU (FlagLT_UGT)) 6448 // cond: 6449 // result: (MOVDconst [1]) 6450 for { 6451 v_0 := v.Args[0] 6452 if v_0.Op != OpARM64FlagLT_UGT { 6453 break 6454 } 6455 v.reset(OpARM64MOVDconst) 6456 v.AuxInt = 1 6457 return true 6458 } 6459 // match: (GreaterThanU (FlagGT_ULT)) 6460 // cond: 6461 // result: (MOVDconst [0]) 6462 for { 6463 v_0 := v.Args[0] 6464 if v_0.Op != OpARM64FlagGT_ULT { 6465 break 6466 } 6467 v.reset(OpARM64MOVDconst) 6468 v.AuxInt = 0 6469 return true 6470 } 6471 // match: (GreaterThanU (FlagGT_UGT)) 6472 // cond: 6473 // result: (MOVDconst [1]) 6474 for { 6475 v_0 := v.Args[0] 6476 if v_0.Op != OpARM64FlagGT_UGT { 6477 break 6478 } 6479 v.reset(OpARM64MOVDconst) 6480 v.AuxInt = 1 6481 return true 6482 } 6483 // match: (GreaterThanU (InvertFlags x)) 6484 // cond: 6485 // result: (LessThanU x) 6486 for { 6487 v_0 := 
v.Args[0] 6488 if v_0.Op != OpARM64InvertFlags { 6489 break 6490 } 6491 x := v_0.Args[0] 6492 v.reset(OpARM64LessThanU) 6493 v.AddArg(x) 6494 return true 6495 } 6496 return false 6497 } 6498 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 6499 // match: (LessEqual (FlagEQ)) 6500 // cond: 6501 // result: (MOVDconst [1]) 6502 for { 6503 v_0 := v.Args[0] 6504 if v_0.Op != OpARM64FlagEQ { 6505 break 6506 } 6507 v.reset(OpARM64MOVDconst) 6508 v.AuxInt = 1 6509 return true 6510 } 6511 // match: (LessEqual (FlagLT_ULT)) 6512 // cond: 6513 // result: (MOVDconst [1]) 6514 for { 6515 v_0 := v.Args[0] 6516 if v_0.Op != OpARM64FlagLT_ULT { 6517 break 6518 } 6519 v.reset(OpARM64MOVDconst) 6520 v.AuxInt = 1 6521 return true 6522 } 6523 // match: (LessEqual (FlagLT_UGT)) 6524 // cond: 6525 // result: (MOVDconst [1]) 6526 for { 6527 v_0 := v.Args[0] 6528 if v_0.Op != OpARM64FlagLT_UGT { 6529 break 6530 } 6531 v.reset(OpARM64MOVDconst) 6532 v.AuxInt = 1 6533 return true 6534 } 6535 // match: (LessEqual (FlagGT_ULT)) 6536 // cond: 6537 // result: (MOVDconst [0]) 6538 for { 6539 v_0 := v.Args[0] 6540 if v_0.Op != OpARM64FlagGT_ULT { 6541 break 6542 } 6543 v.reset(OpARM64MOVDconst) 6544 v.AuxInt = 0 6545 return true 6546 } 6547 // match: (LessEqual (FlagGT_UGT)) 6548 // cond: 6549 // result: (MOVDconst [0]) 6550 for { 6551 v_0 := v.Args[0] 6552 if v_0.Op != OpARM64FlagGT_UGT { 6553 break 6554 } 6555 v.reset(OpARM64MOVDconst) 6556 v.AuxInt = 0 6557 return true 6558 } 6559 // match: (LessEqual (InvertFlags x)) 6560 // cond: 6561 // result: (GreaterEqual x) 6562 for { 6563 v_0 := v.Args[0] 6564 if v_0.Op != OpARM64InvertFlags { 6565 break 6566 } 6567 x := v_0.Args[0] 6568 v.reset(OpARM64GreaterEqual) 6569 v.AddArg(x) 6570 return true 6571 } 6572 return false 6573 } 6574 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool { 6575 // match: (LessEqualU (FlagEQ)) 6576 // cond: 6577 // result: (MOVDconst [1]) 6578 for { 6579 v_0 := v.Args[0] 6580 if v_0.Op != OpARM64FlagEQ { 
6581 break 6582 } 6583 v.reset(OpARM64MOVDconst) 6584 v.AuxInt = 1 6585 return true 6586 } 6587 // match: (LessEqualU (FlagLT_ULT)) 6588 // cond: 6589 // result: (MOVDconst [1]) 6590 for { 6591 v_0 := v.Args[0] 6592 if v_0.Op != OpARM64FlagLT_ULT { 6593 break 6594 } 6595 v.reset(OpARM64MOVDconst) 6596 v.AuxInt = 1 6597 return true 6598 } 6599 // match: (LessEqualU (FlagLT_UGT)) 6600 // cond: 6601 // result: (MOVDconst [0]) 6602 for { 6603 v_0 := v.Args[0] 6604 if v_0.Op != OpARM64FlagLT_UGT { 6605 break 6606 } 6607 v.reset(OpARM64MOVDconst) 6608 v.AuxInt = 0 6609 return true 6610 } 6611 // match: (LessEqualU (FlagGT_ULT)) 6612 // cond: 6613 // result: (MOVDconst [1]) 6614 for { 6615 v_0 := v.Args[0] 6616 if v_0.Op != OpARM64FlagGT_ULT { 6617 break 6618 } 6619 v.reset(OpARM64MOVDconst) 6620 v.AuxInt = 1 6621 return true 6622 } 6623 // match: (LessEqualU (FlagGT_UGT)) 6624 // cond: 6625 // result: (MOVDconst [0]) 6626 for { 6627 v_0 := v.Args[0] 6628 if v_0.Op != OpARM64FlagGT_UGT { 6629 break 6630 } 6631 v.reset(OpARM64MOVDconst) 6632 v.AuxInt = 0 6633 return true 6634 } 6635 // match: (LessEqualU (InvertFlags x)) 6636 // cond: 6637 // result: (GreaterEqualU x) 6638 for { 6639 v_0 := v.Args[0] 6640 if v_0.Op != OpARM64InvertFlags { 6641 break 6642 } 6643 x := v_0.Args[0] 6644 v.reset(OpARM64GreaterEqualU) 6645 v.AddArg(x) 6646 return true 6647 } 6648 return false 6649 } 6650 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool { 6651 // match: (LessThan (FlagEQ)) 6652 // cond: 6653 // result: (MOVDconst [0]) 6654 for { 6655 v_0 := v.Args[0] 6656 if v_0.Op != OpARM64FlagEQ { 6657 break 6658 } 6659 v.reset(OpARM64MOVDconst) 6660 v.AuxInt = 0 6661 return true 6662 } 6663 // match: (LessThan (FlagLT_ULT)) 6664 // cond: 6665 // result: (MOVDconst [1]) 6666 for { 6667 v_0 := v.Args[0] 6668 if v_0.Op != OpARM64FlagLT_ULT { 6669 break 6670 } 6671 v.reset(OpARM64MOVDconst) 6672 v.AuxInt = 1 6673 return true 6674 } 6675 // match: (LessThan (FlagLT_UGT)) 6676 // cond: 6677 
// result: (MOVDconst [1]) 6678 for { 6679 v_0 := v.Args[0] 6680 if v_0.Op != OpARM64FlagLT_UGT { 6681 break 6682 } 6683 v.reset(OpARM64MOVDconst) 6684 v.AuxInt = 1 6685 return true 6686 } 6687 // match: (LessThan (FlagGT_ULT)) 6688 // cond: 6689 // result: (MOVDconst [0]) 6690 for { 6691 v_0 := v.Args[0] 6692 if v_0.Op != OpARM64FlagGT_ULT { 6693 break 6694 } 6695 v.reset(OpARM64MOVDconst) 6696 v.AuxInt = 0 6697 return true 6698 } 6699 // match: (LessThan (FlagGT_UGT)) 6700 // cond: 6701 // result: (MOVDconst [0]) 6702 for { 6703 v_0 := v.Args[0] 6704 if v_0.Op != OpARM64FlagGT_UGT { 6705 break 6706 } 6707 v.reset(OpARM64MOVDconst) 6708 v.AuxInt = 0 6709 return true 6710 } 6711 // match: (LessThan (InvertFlags x)) 6712 // cond: 6713 // result: (GreaterThan x) 6714 for { 6715 v_0 := v.Args[0] 6716 if v_0.Op != OpARM64InvertFlags { 6717 break 6718 } 6719 x := v_0.Args[0] 6720 v.reset(OpARM64GreaterThan) 6721 v.AddArg(x) 6722 return true 6723 } 6724 return false 6725 } 6726 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool { 6727 // match: (LessThanU (FlagEQ)) 6728 // cond: 6729 // result: (MOVDconst [0]) 6730 for { 6731 v_0 := v.Args[0] 6732 if v_0.Op != OpARM64FlagEQ { 6733 break 6734 } 6735 v.reset(OpARM64MOVDconst) 6736 v.AuxInt = 0 6737 return true 6738 } 6739 // match: (LessThanU (FlagLT_ULT)) 6740 // cond: 6741 // result: (MOVDconst [1]) 6742 for { 6743 v_0 := v.Args[0] 6744 if v_0.Op != OpARM64FlagLT_ULT { 6745 break 6746 } 6747 v.reset(OpARM64MOVDconst) 6748 v.AuxInt = 1 6749 return true 6750 } 6751 // match: (LessThanU (FlagLT_UGT)) 6752 // cond: 6753 // result: (MOVDconst [0]) 6754 for { 6755 v_0 := v.Args[0] 6756 if v_0.Op != OpARM64FlagLT_UGT { 6757 break 6758 } 6759 v.reset(OpARM64MOVDconst) 6760 v.AuxInt = 0 6761 return true 6762 } 6763 // match: (LessThanU (FlagGT_ULT)) 6764 // cond: 6765 // result: (MOVDconst [1]) 6766 for { 6767 v_0 := v.Args[0] 6768 if v_0.Op != OpARM64FlagGT_ULT { 6769 break 6770 } 6771 v.reset(OpARM64MOVDconst) 6772 
v.AuxInt = 1 6773 return true 6774 } 6775 // match: (LessThanU (FlagGT_UGT)) 6776 // cond: 6777 // result: (MOVDconst [0]) 6778 for { 6779 v_0 := v.Args[0] 6780 if v_0.Op != OpARM64FlagGT_UGT { 6781 break 6782 } 6783 v.reset(OpARM64MOVDconst) 6784 v.AuxInt = 0 6785 return true 6786 } 6787 // match: (LessThanU (InvertFlags x)) 6788 // cond: 6789 // result: (GreaterThanU x) 6790 for { 6791 v_0 := v.Args[0] 6792 if v_0.Op != OpARM64InvertFlags { 6793 break 6794 } 6795 x := v_0.Args[0] 6796 v.reset(OpARM64GreaterThanU) 6797 v.AddArg(x) 6798 return true 6799 } 6800 return false 6801 } 6802 func rewriteValueARM64_OpARM64MADD_0(v *Value) bool { 6803 b := v.Block 6804 _ = b 6805 // match: (MADD a x (MOVDconst [-1])) 6806 // cond: 6807 // result: (SUB a x) 6808 for { 6809 _ = v.Args[2] 6810 a := v.Args[0] 6811 x := v.Args[1] 6812 v_2 := v.Args[2] 6813 if v_2.Op != OpARM64MOVDconst { 6814 break 6815 } 6816 if v_2.AuxInt != -1 { 6817 break 6818 } 6819 v.reset(OpARM64SUB) 6820 v.AddArg(a) 6821 v.AddArg(x) 6822 return true 6823 } 6824 // match: (MADD a _ (MOVDconst [0])) 6825 // cond: 6826 // result: a 6827 for { 6828 _ = v.Args[2] 6829 a := v.Args[0] 6830 v_2 := v.Args[2] 6831 if v_2.Op != OpARM64MOVDconst { 6832 break 6833 } 6834 if v_2.AuxInt != 0 { 6835 break 6836 } 6837 v.reset(OpCopy) 6838 v.Type = a.Type 6839 v.AddArg(a) 6840 return true 6841 } 6842 // match: (MADD a x (MOVDconst [1])) 6843 // cond: 6844 // result: (ADD a x) 6845 for { 6846 _ = v.Args[2] 6847 a := v.Args[0] 6848 x := v.Args[1] 6849 v_2 := v.Args[2] 6850 if v_2.Op != OpARM64MOVDconst { 6851 break 6852 } 6853 if v_2.AuxInt != 1 { 6854 break 6855 } 6856 v.reset(OpARM64ADD) 6857 v.AddArg(a) 6858 v.AddArg(x) 6859 return true 6860 } 6861 // match: (MADD a x (MOVDconst [c])) 6862 // cond: isPowerOfTwo(c) 6863 // result: (ADDshiftLL a x [log2(c)]) 6864 for { 6865 _ = v.Args[2] 6866 a := v.Args[0] 6867 x := v.Args[1] 6868 v_2 := v.Args[2] 6869 if v_2.Op != OpARM64MOVDconst { 6870 break 6871 } 6872 c := v_2.AuxInt 
6873 if !(isPowerOfTwo(c)) { 6874 break 6875 } 6876 v.reset(OpARM64ADDshiftLL) 6877 v.AuxInt = log2(c) 6878 v.AddArg(a) 6879 v.AddArg(x) 6880 return true 6881 } 6882 // match: (MADD a x (MOVDconst [c])) 6883 // cond: isPowerOfTwo(c-1) && c>=3 6884 // result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)])) 6885 for { 6886 _ = v.Args[2] 6887 a := v.Args[0] 6888 x := v.Args[1] 6889 v_2 := v.Args[2] 6890 if v_2.Op != OpARM64MOVDconst { 6891 break 6892 } 6893 c := v_2.AuxInt 6894 if !(isPowerOfTwo(c-1) && c >= 3) { 6895 break 6896 } 6897 v.reset(OpARM64ADD) 6898 v.AddArg(a) 6899 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6900 v0.AuxInt = log2(c - 1) 6901 v0.AddArg(x) 6902 v0.AddArg(x) 6903 v.AddArg(v0) 6904 return true 6905 } 6906 // match: (MADD a x (MOVDconst [c])) 6907 // cond: isPowerOfTwo(c+1) && c>=7 6908 // result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)])) 6909 for { 6910 _ = v.Args[2] 6911 a := v.Args[0] 6912 x := v.Args[1] 6913 v_2 := v.Args[2] 6914 if v_2.Op != OpARM64MOVDconst { 6915 break 6916 } 6917 c := v_2.AuxInt 6918 if !(isPowerOfTwo(c+1) && c >= 7) { 6919 break 6920 } 6921 v.reset(OpARM64SUB) 6922 v.AddArg(a) 6923 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6924 v0.AuxInt = log2(c + 1) 6925 v0.AddArg(x) 6926 v0.AddArg(x) 6927 v.AddArg(v0) 6928 return true 6929 } 6930 // match: (MADD a x (MOVDconst [c])) 6931 // cond: c%3 == 0 && isPowerOfTwo(c/3) 6932 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 6933 for { 6934 _ = v.Args[2] 6935 a := v.Args[0] 6936 x := v.Args[1] 6937 v_2 := v.Args[2] 6938 if v_2.Op != OpARM64MOVDconst { 6939 break 6940 } 6941 c := v_2.AuxInt 6942 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 6943 break 6944 } 6945 v.reset(OpARM64SUBshiftLL) 6946 v.AuxInt = log2(c / 3) 6947 v.AddArg(a) 6948 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6949 v0.AuxInt = 2 6950 v0.AddArg(x) 6951 v0.AddArg(x) 6952 v.AddArg(v0) 6953 return true 6954 } 6955 // match: (MADD a x (MOVDconst [c])) 6956 // cond: c%5 == 0 
&& isPowerOfTwo(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADD_10 continues the (MADD ...) rewrite rules:
// it strength-reduces a constant middle operand c (-1, 0, 1, powers of two,
// 2^n±1, and {3,5,7,9}·2^n) into ADD/SUB/shift sequences.
func rewriteValueARM64_OpARM64MADD_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD a (MOVDconst [-1]) x)
	// cond:
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		x := v.Args[2]
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// cond:
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// cond:
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		x := v.Args[2]
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADD_20 folds MADD forms whose remaining
// operands are constants into ADDconst (optionally wrapping a MUL).
func rewriteValueARM64_OpARM64MADD_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD (MOVDconst [c]) x y)
	// cond:
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		y := v.Args[2]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (ADDconst [c*d] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c * d
		v.AddArg(a)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADDW_0 is the 32-bit MADDW analogue of the MADD
// rules: conditions test the int32-truncated constant (int32(c), is32Bit(c)).
func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADDW_10 repeats the MADDW_0 rules with the
// constant in the middle operand position instead of the last.
func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MADDW_20 folds fully/partially constant MADDW
// forms into ADDconst, truncating the constant product to 32 bits.
func rewriteValueARM64_OpARM64MADDW_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW (MOVDconst [c]) x y)
	// cond:
	// result: (ADDconst [c] (MULW <x.Type> x y))
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		y := v.Args[2]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (ADDconst [int64(int32(c)*int32(d))] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		v.AddArg(a)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEG_0 strength-reduces negated multiply (MNEG)
// by special constants; each rule appears for both operand orders.
func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [-1]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG _ (MOVDconst [0]))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG (MOVDconst [0]) _)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG x (MOVDconst [1]))
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [1]) x)
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEG_10 continues the MNEG rules for constants of
// the form 2^n+1 and {3,5,7,9}·2^n, in both operand orders.
func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEG_20 folds an all-constant MNEG to
// (MOVDconst [-c*d]), for either operand order.
func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool {
	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [-c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c * d
		return true
	}
	// match: (MNEG (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c * d
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEGW_0 is the 32-bit MNEGW analogue of the MNEG
// rules; conditions test the int32-truncated constant.
func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEGW_10 continues the 32-bit MNEGW rules
// (2^n+1 and {3,5,7,9}·2^n constants, both operand orders).
func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x
[3]))) 8727 for { 8728 _ = v.Args[1] 8729 v_0 := v.Args[0] 8730 if v_0.Op != OpARM64MOVDconst { 8731 break 8732 } 8733 c := v_0.AuxInt 8734 x := v.Args[1] 8735 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 8736 break 8737 } 8738 v.reset(OpARM64NEG) 8739 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8740 v0.AuxInt = log2(c / 9) 8741 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8742 v1.AuxInt = 3 8743 v1.AddArg(x) 8744 v1.AddArg(x) 8745 v0.AddArg(v1) 8746 v.AddArg(v0) 8747 return true 8748 } 8749 return false 8750 } 8751 func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool { 8752 // match: (MNEGW (MOVDconst [c]) (MOVDconst [d])) 8753 // cond: 8754 // result: (MOVDconst [-int64(int32(c)*int32(d))]) 8755 for { 8756 _ = v.Args[1] 8757 v_0 := v.Args[0] 8758 if v_0.Op != OpARM64MOVDconst { 8759 break 8760 } 8761 c := v_0.AuxInt 8762 v_1 := v.Args[1] 8763 if v_1.Op != OpARM64MOVDconst { 8764 break 8765 } 8766 d := v_1.AuxInt 8767 v.reset(OpARM64MOVDconst) 8768 v.AuxInt = -int64(int32(c) * int32(d)) 8769 return true 8770 } 8771 // match: (MNEGW (MOVDconst [d]) (MOVDconst [c])) 8772 // cond: 8773 // result: (MOVDconst [-int64(int32(c)*int32(d))]) 8774 for { 8775 _ = v.Args[1] 8776 v_0 := v.Args[0] 8777 if v_0.Op != OpARM64MOVDconst { 8778 break 8779 } 8780 d := v_0.AuxInt 8781 v_1 := v.Args[1] 8782 if v_1.Op != OpARM64MOVDconst { 8783 break 8784 } 8785 c := v_1.AuxInt 8786 v.reset(OpARM64MOVDconst) 8787 v.AuxInt = -int64(int32(c) * int32(d)) 8788 return true 8789 } 8790 return false 8791 } 8792 func rewriteValueARM64_OpARM64MOD_0(v *Value) bool { 8793 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 8794 // cond: 8795 // result: (MOVDconst [c%d]) 8796 for { 8797 _ = v.Args[1] 8798 v_0 := v.Args[0] 8799 if v_0.Op != OpARM64MOVDconst { 8800 break 8801 } 8802 c := v_0.AuxInt 8803 v_1 := v.Args[1] 8804 if v_1.Op != OpARM64MOVDconst { 8805 break 8806 } 8807 d := v_1.AuxInt 8808 v.reset(OpARM64MOVDconst) 8809 v.AuxInt = c % d 8810 return true 8811 } 8812 return 
false 8813 } 8814 func rewriteValueARM64_OpARM64MODW_0(v *Value) bool { 8815 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 8816 // cond: 8817 // result: (MOVDconst [int64(int32(c)%int32(d))]) 8818 for { 8819 _ = v.Args[1] 8820 v_0 := v.Args[0] 8821 if v_0.Op != OpARM64MOVDconst { 8822 break 8823 } 8824 c := v_0.AuxInt 8825 v_1 := v.Args[1] 8826 if v_1.Op != OpARM64MOVDconst { 8827 break 8828 } 8829 d := v_1.AuxInt 8830 v.reset(OpARM64MOVDconst) 8831 v.AuxInt = int64(int32(c) % int32(d)) 8832 return true 8833 } 8834 return false 8835 } 8836 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool { 8837 b := v.Block 8838 _ = b 8839 config := b.Func.Config 8840 _ = config 8841 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 8842 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 8843 // result: (MOVBUload [off1+off2] {sym} ptr mem) 8844 for { 8845 off1 := v.AuxInt 8846 sym := v.Aux 8847 _ = v.Args[1] 8848 v_0 := v.Args[0] 8849 if v_0.Op != OpARM64ADDconst { 8850 break 8851 } 8852 off2 := v_0.AuxInt 8853 ptr := v_0.Args[0] 8854 mem := v.Args[1] 8855 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 8856 break 8857 } 8858 v.reset(OpARM64MOVBUload) 8859 v.AuxInt = off1 + off2 8860 v.Aux = sym 8861 v.AddArg(ptr) 8862 v.AddArg(mem) 8863 return true 8864 } 8865 // match: (MOVBUload [off] {sym} (ADD ptr idx) mem) 8866 // cond: off == 0 && sym == nil 8867 // result: (MOVBUloadidx ptr idx mem) 8868 for { 8869 off := v.AuxInt 8870 sym := v.Aux 8871 _ = v.Args[1] 8872 v_0 := v.Args[0] 8873 if v_0.Op != OpARM64ADD { 8874 break 8875 } 8876 _ = v_0.Args[1] 8877 ptr := v_0.Args[0] 8878 idx := v_0.Args[1] 8879 mem := v.Args[1] 8880 if !(off == 0 && sym == nil) { 8881 break 8882 } 8883 v.reset(OpARM64MOVBUloadidx) 8884 v.AddArg(ptr) 8885 v.AddArg(idx) 8886 v.AddArg(mem) 8887 return true 8888 } 8889 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 8890 // cond: canMergeSym(sym1,sym2) && 
is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 8891 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 8892 for { 8893 off1 := v.AuxInt 8894 sym1 := v.Aux 8895 _ = v.Args[1] 8896 v_0 := v.Args[0] 8897 if v_0.Op != OpARM64MOVDaddr { 8898 break 8899 } 8900 off2 := v_0.AuxInt 8901 sym2 := v_0.Aux 8902 ptr := v_0.Args[0] 8903 mem := v.Args[1] 8904 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 8905 break 8906 } 8907 v.reset(OpARM64MOVBUload) 8908 v.AuxInt = off1 + off2 8909 v.Aux = mergeSym(sym1, sym2) 8910 v.AddArg(ptr) 8911 v.AddArg(mem) 8912 return true 8913 } 8914 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 8915 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 8916 // result: (MOVDconst [0]) 8917 for { 8918 off := v.AuxInt 8919 sym := v.Aux 8920 _ = v.Args[1] 8921 ptr := v.Args[0] 8922 v_1 := v.Args[1] 8923 if v_1.Op != OpARM64MOVBstorezero { 8924 break 8925 } 8926 off2 := v_1.AuxInt 8927 sym2 := v_1.Aux 8928 _ = v_1.Args[1] 8929 ptr2 := v_1.Args[0] 8930 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 8931 break 8932 } 8933 v.reset(OpARM64MOVDconst) 8934 v.AuxInt = 0 8935 return true 8936 } 8937 // match: (MOVBUload [off] {sym} (SB) _) 8938 // cond: symIsRO(sym) 8939 // result: (MOVDconst [int64(read8(sym, off))]) 8940 for { 8941 off := v.AuxInt 8942 sym := v.Aux 8943 _ = v.Args[1] 8944 v_0 := v.Args[0] 8945 if v_0.Op != OpSB { 8946 break 8947 } 8948 if !(symIsRO(sym)) { 8949 break 8950 } 8951 v.reset(OpARM64MOVDconst) 8952 v.AuxInt = int64(read8(sym, off)) 8953 return true 8954 } 8955 return false 8956 } 8957 func rewriteValueARM64_OpARM64MOVBUloadidx_0(v *Value) bool { 8958 // match: (MOVBUloadidx ptr (MOVDconst [c]) mem) 8959 // cond: 8960 // result: (MOVBUload [c] ptr mem) 8961 for { 8962 _ = v.Args[2] 8963 ptr := v.Args[0] 8964 v_1 := v.Args[1] 8965 if v_1.Op != OpARM64MOVDconst { 8966 break 8967 } 8968 c := v_1.AuxInt 
8969 mem := v.Args[2] 8970 v.reset(OpARM64MOVBUload) 8971 v.AuxInt = c 8972 v.AddArg(ptr) 8973 v.AddArg(mem) 8974 return true 8975 } 8976 // match: (MOVBUloadidx (MOVDconst [c]) ptr mem) 8977 // cond: 8978 // result: (MOVBUload [c] ptr mem) 8979 for { 8980 _ = v.Args[2] 8981 v_0 := v.Args[0] 8982 if v_0.Op != OpARM64MOVDconst { 8983 break 8984 } 8985 c := v_0.AuxInt 8986 ptr := v.Args[1] 8987 mem := v.Args[2] 8988 v.reset(OpARM64MOVBUload) 8989 v.AuxInt = c 8990 v.AddArg(ptr) 8991 v.AddArg(mem) 8992 return true 8993 } 8994 // match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 8995 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 8996 // result: (MOVDconst [0]) 8997 for { 8998 _ = v.Args[2] 8999 ptr := v.Args[0] 9000 idx := v.Args[1] 9001 v_2 := v.Args[2] 9002 if v_2.Op != OpARM64MOVBstorezeroidx { 9003 break 9004 } 9005 _ = v_2.Args[2] 9006 ptr2 := v_2.Args[0] 9007 idx2 := v_2.Args[1] 9008 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 9009 break 9010 } 9011 v.reset(OpARM64MOVDconst) 9012 v.AuxInt = 0 9013 return true 9014 } 9015 return false 9016 } 9017 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool { 9018 // match: (MOVBUreg x:(MOVBUload _ _)) 9019 // cond: 9020 // result: (MOVDreg x) 9021 for { 9022 x := v.Args[0] 9023 if x.Op != OpARM64MOVBUload { 9024 break 9025 } 9026 _ = x.Args[1] 9027 v.reset(OpARM64MOVDreg) 9028 v.AddArg(x) 9029 return true 9030 } 9031 // match: (MOVBUreg x:(MOVBUloadidx _ _ _)) 9032 // cond: 9033 // result: (MOVDreg x) 9034 for { 9035 x := v.Args[0] 9036 if x.Op != OpARM64MOVBUloadidx { 9037 break 9038 } 9039 _ = x.Args[2] 9040 v.reset(OpARM64MOVDreg) 9041 v.AddArg(x) 9042 return true 9043 } 9044 // match: (MOVBUreg x:(MOVBUreg _)) 9045 // cond: 9046 // result: (MOVDreg x) 9047 for { 9048 x := v.Args[0] 9049 if x.Op != OpARM64MOVBUreg { 9050 break 9051 } 9052 v.reset(OpARM64MOVDreg) 9053 v.AddArg(x) 9054 return 
true 9055 } 9056 // match: (MOVBUreg (ANDconst [c] x)) 9057 // cond: 9058 // result: (ANDconst [c&(1<<8-1)] x) 9059 for { 9060 v_0 := v.Args[0] 9061 if v_0.Op != OpARM64ANDconst { 9062 break 9063 } 9064 c := v_0.AuxInt 9065 x := v_0.Args[0] 9066 v.reset(OpARM64ANDconst) 9067 v.AuxInt = c & (1<<8 - 1) 9068 v.AddArg(x) 9069 return true 9070 } 9071 // match: (MOVBUreg (MOVDconst [c])) 9072 // cond: 9073 // result: (MOVDconst [int64(uint8(c))]) 9074 for { 9075 v_0 := v.Args[0] 9076 if v_0.Op != OpARM64MOVDconst { 9077 break 9078 } 9079 c := v_0.AuxInt 9080 v.reset(OpARM64MOVDconst) 9081 v.AuxInt = int64(uint8(c)) 9082 return true 9083 } 9084 // match: (MOVBUreg x) 9085 // cond: x.Type.IsBoolean() 9086 // result: (MOVDreg x) 9087 for { 9088 x := v.Args[0] 9089 if !(x.Type.IsBoolean()) { 9090 break 9091 } 9092 v.reset(OpARM64MOVDreg) 9093 v.AddArg(x) 9094 return true 9095 } 9096 // match: (MOVBUreg (SLLconst [sc] x)) 9097 // cond: isARM64BFMask(sc, 1<<8-1, sc) 9098 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x) 9099 for { 9100 v_0 := v.Args[0] 9101 if v_0.Op != OpARM64SLLconst { 9102 break 9103 } 9104 sc := v_0.AuxInt 9105 x := v_0.Args[0] 9106 if !(isARM64BFMask(sc, 1<<8-1, sc)) { 9107 break 9108 } 9109 v.reset(OpARM64UBFIZ) 9110 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc)) 9111 v.AddArg(x) 9112 return true 9113 } 9114 // match: (MOVBUreg (SRLconst [sc] x)) 9115 // cond: isARM64BFMask(sc, 1<<8-1, 0) 9116 // result: (UBFX [arm64BFAuxInt(sc, 8)] x) 9117 for { 9118 v_0 := v.Args[0] 9119 if v_0.Op != OpARM64SRLconst { 9120 break 9121 } 9122 sc := v_0.AuxInt 9123 x := v_0.Args[0] 9124 if !(isARM64BFMask(sc, 1<<8-1, 0)) { 9125 break 9126 } 9127 v.reset(OpARM64UBFX) 9128 v.AuxInt = arm64BFAuxInt(sc, 8) 9129 v.AddArg(x) 9130 return true 9131 } 9132 return false 9133 } 9134 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool { 9135 b := v.Block 9136 _ = b 9137 config := b.Func.Config 9138 _ = config 9139 // match: (MOVBload [off1] {sym} 
(ADDconst [off2] ptr) mem) 9140 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9141 // result: (MOVBload [off1+off2] {sym} ptr mem) 9142 for { 9143 off1 := v.AuxInt 9144 sym := v.Aux 9145 _ = v.Args[1] 9146 v_0 := v.Args[0] 9147 if v_0.Op != OpARM64ADDconst { 9148 break 9149 } 9150 off2 := v_0.AuxInt 9151 ptr := v_0.Args[0] 9152 mem := v.Args[1] 9153 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9154 break 9155 } 9156 v.reset(OpARM64MOVBload) 9157 v.AuxInt = off1 + off2 9158 v.Aux = sym 9159 v.AddArg(ptr) 9160 v.AddArg(mem) 9161 return true 9162 } 9163 // match: (MOVBload [off] {sym} (ADD ptr idx) mem) 9164 // cond: off == 0 && sym == nil 9165 // result: (MOVBloadidx ptr idx mem) 9166 for { 9167 off := v.AuxInt 9168 sym := v.Aux 9169 _ = v.Args[1] 9170 v_0 := v.Args[0] 9171 if v_0.Op != OpARM64ADD { 9172 break 9173 } 9174 _ = v_0.Args[1] 9175 ptr := v_0.Args[0] 9176 idx := v_0.Args[1] 9177 mem := v.Args[1] 9178 if !(off == 0 && sym == nil) { 9179 break 9180 } 9181 v.reset(OpARM64MOVBloadidx) 9182 v.AddArg(ptr) 9183 v.AddArg(idx) 9184 v.AddArg(mem) 9185 return true 9186 } 9187 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 9188 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9189 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 9190 for { 9191 off1 := v.AuxInt 9192 sym1 := v.Aux 9193 _ = v.Args[1] 9194 v_0 := v.Args[0] 9195 if v_0.Op != OpARM64MOVDaddr { 9196 break 9197 } 9198 off2 := v_0.AuxInt 9199 sym2 := v_0.Aux 9200 ptr := v_0.Args[0] 9201 mem := v.Args[1] 9202 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9203 break 9204 } 9205 v.reset(OpARM64MOVBload) 9206 v.AuxInt = off1 + off2 9207 v.Aux = mergeSym(sym1, sym2) 9208 v.AddArg(ptr) 9209 v.AddArg(mem) 9210 return true 9211 } 9212 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 9213 // 
cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 9214 // result: (MOVDconst [0]) 9215 for { 9216 off := v.AuxInt 9217 sym := v.Aux 9218 _ = v.Args[1] 9219 ptr := v.Args[0] 9220 v_1 := v.Args[1] 9221 if v_1.Op != OpARM64MOVBstorezero { 9222 break 9223 } 9224 off2 := v_1.AuxInt 9225 sym2 := v_1.Aux 9226 _ = v_1.Args[1] 9227 ptr2 := v_1.Args[0] 9228 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 9229 break 9230 } 9231 v.reset(OpARM64MOVDconst) 9232 v.AuxInt = 0 9233 return true 9234 } 9235 return false 9236 } 9237 func rewriteValueARM64_OpARM64MOVBloadidx_0(v *Value) bool { 9238 // match: (MOVBloadidx ptr (MOVDconst [c]) mem) 9239 // cond: 9240 // result: (MOVBload [c] ptr mem) 9241 for { 9242 _ = v.Args[2] 9243 ptr := v.Args[0] 9244 v_1 := v.Args[1] 9245 if v_1.Op != OpARM64MOVDconst { 9246 break 9247 } 9248 c := v_1.AuxInt 9249 mem := v.Args[2] 9250 v.reset(OpARM64MOVBload) 9251 v.AuxInt = c 9252 v.AddArg(ptr) 9253 v.AddArg(mem) 9254 return true 9255 } 9256 // match: (MOVBloadidx (MOVDconst [c]) ptr mem) 9257 // cond: 9258 // result: (MOVBload [c] ptr mem) 9259 for { 9260 _ = v.Args[2] 9261 v_0 := v.Args[0] 9262 if v_0.Op != OpARM64MOVDconst { 9263 break 9264 } 9265 c := v_0.AuxInt 9266 ptr := v.Args[1] 9267 mem := v.Args[2] 9268 v.reset(OpARM64MOVBload) 9269 v.AuxInt = c 9270 v.AddArg(ptr) 9271 v.AddArg(mem) 9272 return true 9273 } 9274 // match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 9275 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 9276 // result: (MOVDconst [0]) 9277 for { 9278 _ = v.Args[2] 9279 ptr := v.Args[0] 9280 idx := v.Args[1] 9281 v_2 := v.Args[2] 9282 if v_2.Op != OpARM64MOVBstorezeroidx { 9283 break 9284 } 9285 _ = v_2.Args[2] 9286 ptr2 := v_2.Args[0] 9287 idx2 := v_2.Args[1] 9288 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 9289 break 9290 } 9291 v.reset(OpARM64MOVDconst) 9292 v.AuxInt = 0 9293 return 
true 9294 } 9295 return false 9296 } 9297 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool { 9298 // match: (MOVBreg x:(MOVBload _ _)) 9299 // cond: 9300 // result: (MOVDreg x) 9301 for { 9302 x := v.Args[0] 9303 if x.Op != OpARM64MOVBload { 9304 break 9305 } 9306 _ = x.Args[1] 9307 v.reset(OpARM64MOVDreg) 9308 v.AddArg(x) 9309 return true 9310 } 9311 // match: (MOVBreg x:(MOVBloadidx _ _ _)) 9312 // cond: 9313 // result: (MOVDreg x) 9314 for { 9315 x := v.Args[0] 9316 if x.Op != OpARM64MOVBloadidx { 9317 break 9318 } 9319 _ = x.Args[2] 9320 v.reset(OpARM64MOVDreg) 9321 v.AddArg(x) 9322 return true 9323 } 9324 // match: (MOVBreg x:(MOVBreg _)) 9325 // cond: 9326 // result: (MOVDreg x) 9327 for { 9328 x := v.Args[0] 9329 if x.Op != OpARM64MOVBreg { 9330 break 9331 } 9332 v.reset(OpARM64MOVDreg) 9333 v.AddArg(x) 9334 return true 9335 } 9336 // match: (MOVBreg (MOVDconst [c])) 9337 // cond: 9338 // result: (MOVDconst [int64(int8(c))]) 9339 for { 9340 v_0 := v.Args[0] 9341 if v_0.Op != OpARM64MOVDconst { 9342 break 9343 } 9344 c := v_0.AuxInt 9345 v.reset(OpARM64MOVDconst) 9346 v.AuxInt = int64(int8(c)) 9347 return true 9348 } 9349 // match: (MOVBreg (SLLconst [lc] x)) 9350 // cond: lc < 8 9351 // result: (SBFIZ [arm64BFAuxInt(lc, 8-lc)] x) 9352 for { 9353 v_0 := v.Args[0] 9354 if v_0.Op != OpARM64SLLconst { 9355 break 9356 } 9357 lc := v_0.AuxInt 9358 x := v_0.Args[0] 9359 if !(lc < 8) { 9360 break 9361 } 9362 v.reset(OpARM64SBFIZ) 9363 v.AuxInt = arm64BFAuxInt(lc, 8-lc) 9364 v.AddArg(x) 9365 return true 9366 } 9367 return false 9368 } 9369 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool { 9370 b := v.Block 9371 _ = b 9372 config := b.Func.Config 9373 _ = config 9374 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 9375 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9376 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 9377 for { 9378 off1 := v.AuxInt 9379 sym := v.Aux 9380 _ = v.Args[2] 9381 v_0 := 
v.Args[0] 9382 if v_0.Op != OpARM64ADDconst { 9383 break 9384 } 9385 off2 := v_0.AuxInt 9386 ptr := v_0.Args[0] 9387 val := v.Args[1] 9388 mem := v.Args[2] 9389 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9390 break 9391 } 9392 v.reset(OpARM64MOVBstore) 9393 v.AuxInt = off1 + off2 9394 v.Aux = sym 9395 v.AddArg(ptr) 9396 v.AddArg(val) 9397 v.AddArg(mem) 9398 return true 9399 } 9400 // match: (MOVBstore [off] {sym} (ADD ptr idx) val mem) 9401 // cond: off == 0 && sym == nil 9402 // result: (MOVBstoreidx ptr idx val mem) 9403 for { 9404 off := v.AuxInt 9405 sym := v.Aux 9406 _ = v.Args[2] 9407 v_0 := v.Args[0] 9408 if v_0.Op != OpARM64ADD { 9409 break 9410 } 9411 _ = v_0.Args[1] 9412 ptr := v_0.Args[0] 9413 idx := v_0.Args[1] 9414 val := v.Args[1] 9415 mem := v.Args[2] 9416 if !(off == 0 && sym == nil) { 9417 break 9418 } 9419 v.reset(OpARM64MOVBstoreidx) 9420 v.AddArg(ptr) 9421 v.AddArg(idx) 9422 v.AddArg(val) 9423 v.AddArg(mem) 9424 return true 9425 } 9426 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 9427 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9428 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 9429 for { 9430 off1 := v.AuxInt 9431 sym1 := v.Aux 9432 _ = v.Args[2] 9433 v_0 := v.Args[0] 9434 if v_0.Op != OpARM64MOVDaddr { 9435 break 9436 } 9437 off2 := v_0.AuxInt 9438 sym2 := v_0.Aux 9439 ptr := v_0.Args[0] 9440 val := v.Args[1] 9441 mem := v.Args[2] 9442 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9443 break 9444 } 9445 v.reset(OpARM64MOVBstore) 9446 v.AuxInt = off1 + off2 9447 v.Aux = mergeSym(sym1, sym2) 9448 v.AddArg(ptr) 9449 v.AddArg(val) 9450 v.AddArg(mem) 9451 return true 9452 } 9453 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 9454 // cond: 9455 // result: (MOVBstorezero [off] {sym} ptr mem) 9456 for { 9457 off := v.AuxInt 9458 sym := v.Aux 9459 
_ = v.Args[2] 9460 ptr := v.Args[0] 9461 v_1 := v.Args[1] 9462 if v_1.Op != OpARM64MOVDconst { 9463 break 9464 } 9465 if v_1.AuxInt != 0 { 9466 break 9467 } 9468 mem := v.Args[2] 9469 v.reset(OpARM64MOVBstorezero) 9470 v.AuxInt = off 9471 v.Aux = sym 9472 v.AddArg(ptr) 9473 v.AddArg(mem) 9474 return true 9475 } 9476 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 9477 // cond: 9478 // result: (MOVBstore [off] {sym} ptr x mem) 9479 for { 9480 off := v.AuxInt 9481 sym := v.Aux 9482 _ = v.Args[2] 9483 ptr := v.Args[0] 9484 v_1 := v.Args[1] 9485 if v_1.Op != OpARM64MOVBreg { 9486 break 9487 } 9488 x := v_1.Args[0] 9489 mem := v.Args[2] 9490 v.reset(OpARM64MOVBstore) 9491 v.AuxInt = off 9492 v.Aux = sym 9493 v.AddArg(ptr) 9494 v.AddArg(x) 9495 v.AddArg(mem) 9496 return true 9497 } 9498 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 9499 // cond: 9500 // result: (MOVBstore [off] {sym} ptr x mem) 9501 for { 9502 off := v.AuxInt 9503 sym := v.Aux 9504 _ = v.Args[2] 9505 ptr := v.Args[0] 9506 v_1 := v.Args[1] 9507 if v_1.Op != OpARM64MOVBUreg { 9508 break 9509 } 9510 x := v_1.Args[0] 9511 mem := v.Args[2] 9512 v.reset(OpARM64MOVBstore) 9513 v.AuxInt = off 9514 v.Aux = sym 9515 v.AddArg(ptr) 9516 v.AddArg(x) 9517 v.AddArg(mem) 9518 return true 9519 } 9520 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 9521 // cond: 9522 // result: (MOVBstore [off] {sym} ptr x mem) 9523 for { 9524 off := v.AuxInt 9525 sym := v.Aux 9526 _ = v.Args[2] 9527 ptr := v.Args[0] 9528 v_1 := v.Args[1] 9529 if v_1.Op != OpARM64MOVHreg { 9530 break 9531 } 9532 x := v_1.Args[0] 9533 mem := v.Args[2] 9534 v.reset(OpARM64MOVBstore) 9535 v.AuxInt = off 9536 v.Aux = sym 9537 v.AddArg(ptr) 9538 v.AddArg(x) 9539 v.AddArg(mem) 9540 return true 9541 } 9542 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 9543 // cond: 9544 // result: (MOVBstore [off] {sym} ptr x mem) 9545 for { 9546 off := v.AuxInt 9547 sym := v.Aux 9548 _ = v.Args[2] 9549 ptr := v.Args[0] 9550 v_1 := v.Args[1] 9551 if 
v_1.Op != OpARM64MOVHUreg { 9552 break 9553 } 9554 x := v_1.Args[0] 9555 mem := v.Args[2] 9556 v.reset(OpARM64MOVBstore) 9557 v.AuxInt = off 9558 v.Aux = sym 9559 v.AddArg(ptr) 9560 v.AddArg(x) 9561 v.AddArg(mem) 9562 return true 9563 } 9564 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 9565 // cond: 9566 // result: (MOVBstore [off] {sym} ptr x mem) 9567 for { 9568 off := v.AuxInt 9569 sym := v.Aux 9570 _ = v.Args[2] 9571 ptr := v.Args[0] 9572 v_1 := v.Args[1] 9573 if v_1.Op != OpARM64MOVWreg { 9574 break 9575 } 9576 x := v_1.Args[0] 9577 mem := v.Args[2] 9578 v.reset(OpARM64MOVBstore) 9579 v.AuxInt = off 9580 v.Aux = sym 9581 v.AddArg(ptr) 9582 v.AddArg(x) 9583 v.AddArg(mem) 9584 return true 9585 } 9586 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 9587 // cond: 9588 // result: (MOVBstore [off] {sym} ptr x mem) 9589 for { 9590 off := v.AuxInt 9591 sym := v.Aux 9592 _ = v.Args[2] 9593 ptr := v.Args[0] 9594 v_1 := v.Args[1] 9595 if v_1.Op != OpARM64MOVWUreg { 9596 break 9597 } 9598 x := v_1.Args[0] 9599 mem := v.Args[2] 9600 v.reset(OpARM64MOVBstore) 9601 v.AuxInt = off 9602 v.Aux = sym 9603 v.AddArg(ptr) 9604 v.AddArg(x) 9605 v.AddArg(mem) 9606 return true 9607 } 9608 return false 9609 } 9610 func rewriteValueARM64_OpARM64MOVBstore_10(v *Value) bool { 9611 // match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9612 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9613 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9614 for { 9615 i := v.AuxInt 9616 s := v.Aux 9617 _ = v.Args[2] 9618 ptr0 := v.Args[0] 9619 v_1 := v.Args[1] 9620 if v_1.Op != OpARM64SRLconst { 9621 break 9622 } 9623 if v_1.AuxInt != 8 { 9624 break 9625 } 9626 w := v_1.Args[0] 9627 x := v.Args[2] 9628 if x.Op != OpARM64MOVBstore { 9629 break 9630 } 9631 if x.AuxInt != i-1 { 9632 break 9633 } 9634 if x.Aux != s { 9635 break 9636 } 9637 _ = x.Args[2] 9638 ptr1 := x.Args[0] 9639 if w != x.Args[1] { 9640 break 9641 } 9642 mem := x.Args[2] 9643 if 
!(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9644 break 9645 } 9646 v.reset(OpARM64MOVHstore) 9647 v.AuxInt = i - 1 9648 v.Aux = s 9649 v.AddArg(ptr0) 9650 v.AddArg(w) 9651 v.AddArg(mem) 9652 return true 9653 } 9654 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9655 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9656 // result: (MOVHstoreidx ptr1 idx1 w mem) 9657 for { 9658 if v.AuxInt != 1 { 9659 break 9660 } 9661 s := v.Aux 9662 _ = v.Args[2] 9663 v_0 := v.Args[0] 9664 if v_0.Op != OpARM64ADD { 9665 break 9666 } 9667 _ = v_0.Args[1] 9668 ptr0 := v_0.Args[0] 9669 idx0 := v_0.Args[1] 9670 v_1 := v.Args[1] 9671 if v_1.Op != OpARM64SRLconst { 9672 break 9673 } 9674 if v_1.AuxInt != 8 { 9675 break 9676 } 9677 w := v_1.Args[0] 9678 x := v.Args[2] 9679 if x.Op != OpARM64MOVBstoreidx { 9680 break 9681 } 9682 _ = x.Args[3] 9683 ptr1 := x.Args[0] 9684 idx1 := x.Args[1] 9685 if w != x.Args[2] { 9686 break 9687 } 9688 mem := x.Args[3] 9689 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9690 break 9691 } 9692 v.reset(OpARM64MOVHstoreidx) 9693 v.AddArg(ptr1) 9694 v.AddArg(idx1) 9695 v.AddArg(w) 9696 v.AddArg(mem) 9697 return true 9698 } 9699 // match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9700 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9701 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9702 for { 9703 i := v.AuxInt 9704 s := v.Aux 9705 _ = v.Args[2] 9706 ptr0 := v.Args[0] 9707 v_1 := v.Args[1] 9708 if v_1.Op != OpARM64UBFX { 9709 break 9710 } 9711 if v_1.AuxInt != arm64BFAuxInt(8, 8) { 9712 break 9713 } 9714 w := v_1.Args[0] 9715 x := v.Args[2] 9716 if x.Op != OpARM64MOVBstore { 9717 break 9718 } 9719 if x.AuxInt != i-1 { 9720 break 9721 } 9722 if 
x.Aux != s { 9723 break 9724 } 9725 _ = x.Args[2] 9726 ptr1 := x.Args[0] 9727 if w != x.Args[1] { 9728 break 9729 } 9730 mem := x.Args[2] 9731 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9732 break 9733 } 9734 v.reset(OpARM64MOVHstore) 9735 v.AuxInt = i - 1 9736 v.Aux = s 9737 v.AddArg(ptr0) 9738 v.AddArg(w) 9739 v.AddArg(mem) 9740 return true 9741 } 9742 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9743 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9744 // result: (MOVHstoreidx ptr1 idx1 w mem) 9745 for { 9746 if v.AuxInt != 1 { 9747 break 9748 } 9749 s := v.Aux 9750 _ = v.Args[2] 9751 v_0 := v.Args[0] 9752 if v_0.Op != OpARM64ADD { 9753 break 9754 } 9755 _ = v_0.Args[1] 9756 ptr0 := v_0.Args[0] 9757 idx0 := v_0.Args[1] 9758 v_1 := v.Args[1] 9759 if v_1.Op != OpARM64UBFX { 9760 break 9761 } 9762 if v_1.AuxInt != arm64BFAuxInt(8, 8) { 9763 break 9764 } 9765 w := v_1.Args[0] 9766 x := v.Args[2] 9767 if x.Op != OpARM64MOVBstoreidx { 9768 break 9769 } 9770 _ = x.Args[3] 9771 ptr1 := x.Args[0] 9772 idx1 := x.Args[1] 9773 if w != x.Args[2] { 9774 break 9775 } 9776 mem := x.Args[3] 9777 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9778 break 9779 } 9780 v.reset(OpARM64MOVHstoreidx) 9781 v.AddArg(ptr1) 9782 v.AddArg(idx1) 9783 v.AddArg(w) 9784 v.AddArg(mem) 9785 return true 9786 } 9787 // match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9788 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9789 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9790 for { 9791 i := v.AuxInt 9792 s := v.Aux 9793 _ = v.Args[2] 9794 ptr0 := v.Args[0] 9795 v_1 := v.Args[1] 9796 if v_1.Op != OpARM64UBFX { 9797 break 9798 } 9799 if v_1.AuxInt != 
arm64BFAuxInt(8, 24) { 9800 break 9801 } 9802 w := v_1.Args[0] 9803 x := v.Args[2] 9804 if x.Op != OpARM64MOVBstore { 9805 break 9806 } 9807 if x.AuxInt != i-1 { 9808 break 9809 } 9810 if x.Aux != s { 9811 break 9812 } 9813 _ = x.Args[2] 9814 ptr1 := x.Args[0] 9815 if w != x.Args[1] { 9816 break 9817 } 9818 mem := x.Args[2] 9819 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9820 break 9821 } 9822 v.reset(OpARM64MOVHstore) 9823 v.AuxInt = i - 1 9824 v.Aux = s 9825 v.AddArg(ptr0) 9826 v.AddArg(w) 9827 v.AddArg(mem) 9828 return true 9829 } 9830 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9831 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9832 // result: (MOVHstoreidx ptr1 idx1 w mem) 9833 for { 9834 if v.AuxInt != 1 { 9835 break 9836 } 9837 s := v.Aux 9838 _ = v.Args[2] 9839 v_0 := v.Args[0] 9840 if v_0.Op != OpARM64ADD { 9841 break 9842 } 9843 _ = v_0.Args[1] 9844 ptr0 := v_0.Args[0] 9845 idx0 := v_0.Args[1] 9846 v_1 := v.Args[1] 9847 if v_1.Op != OpARM64UBFX { 9848 break 9849 } 9850 if v_1.AuxInt != arm64BFAuxInt(8, 24) { 9851 break 9852 } 9853 w := v_1.Args[0] 9854 x := v.Args[2] 9855 if x.Op != OpARM64MOVBstoreidx { 9856 break 9857 } 9858 _ = x.Args[3] 9859 ptr1 := x.Args[0] 9860 idx1 := x.Args[1] 9861 if w != x.Args[2] { 9862 break 9863 } 9864 mem := x.Args[3] 9865 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9866 break 9867 } 9868 v.reset(OpARM64MOVHstoreidx) 9869 v.AddArg(ptr1) 9870 v.AddArg(idx1) 9871 v.AddArg(w) 9872 v.AddArg(mem) 9873 return true 9874 } 9875 // match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9876 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9877 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9878 for { 
9879 i := v.AuxInt 9880 s := v.Aux 9881 _ = v.Args[2] 9882 ptr0 := v.Args[0] 9883 v_1 := v.Args[1] 9884 if v_1.Op != OpARM64SRLconst { 9885 break 9886 } 9887 if v_1.AuxInt != 8 { 9888 break 9889 } 9890 v_1_0 := v_1.Args[0] 9891 if v_1_0.Op != OpARM64MOVDreg { 9892 break 9893 } 9894 w := v_1_0.Args[0] 9895 x := v.Args[2] 9896 if x.Op != OpARM64MOVBstore { 9897 break 9898 } 9899 if x.AuxInt != i-1 { 9900 break 9901 } 9902 if x.Aux != s { 9903 break 9904 } 9905 _ = x.Args[2] 9906 ptr1 := x.Args[0] 9907 if w != x.Args[1] { 9908 break 9909 } 9910 mem := x.Args[2] 9911 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9912 break 9913 } 9914 v.reset(OpARM64MOVHstore) 9915 v.AuxInt = i - 1 9916 v.Aux = s 9917 v.AddArg(ptr0) 9918 v.AddArg(w) 9919 v.AddArg(mem) 9920 return true 9921 } 9922 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem)) 9923 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9924 // result: (MOVHstoreidx ptr1 idx1 w mem) 9925 for { 9926 if v.AuxInt != 1 { 9927 break 9928 } 9929 s := v.Aux 9930 _ = v.Args[2] 9931 v_0 := v.Args[0] 9932 if v_0.Op != OpARM64ADD { 9933 break 9934 } 9935 _ = v_0.Args[1] 9936 ptr0 := v_0.Args[0] 9937 idx0 := v_0.Args[1] 9938 v_1 := v.Args[1] 9939 if v_1.Op != OpARM64SRLconst { 9940 break 9941 } 9942 if v_1.AuxInt != 8 { 9943 break 9944 } 9945 v_1_0 := v_1.Args[0] 9946 if v_1_0.Op != OpARM64MOVDreg { 9947 break 9948 } 9949 w := v_1_0.Args[0] 9950 x := v.Args[2] 9951 if x.Op != OpARM64MOVBstoreidx { 9952 break 9953 } 9954 _ = x.Args[3] 9955 ptr1 := x.Args[0] 9956 idx1 := x.Args[1] 9957 if w != x.Args[2] { 9958 break 9959 } 9960 mem := x.Args[3] 9961 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9962 break 9963 } 9964 v.reset(OpARM64MOVHstoreidx) 9965 v.AddArg(ptr1) 9966 
v.AddArg(idx1) 9967 v.AddArg(w) 9968 v.AddArg(mem) 9969 return true 9970 } 9971 // match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem)) 9972 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9973 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 9974 for { 9975 i := v.AuxInt 9976 s := v.Aux 9977 _ = v.Args[2] 9978 ptr0 := v.Args[0] 9979 v_1 := v.Args[1] 9980 if v_1.Op != OpARM64SRLconst { 9981 break 9982 } 9983 j := v_1.AuxInt 9984 w := v_1.Args[0] 9985 x := v.Args[2] 9986 if x.Op != OpARM64MOVBstore { 9987 break 9988 } 9989 if x.AuxInt != i-1 { 9990 break 9991 } 9992 if x.Aux != s { 9993 break 9994 } 9995 _ = x.Args[2] 9996 ptr1 := x.Args[0] 9997 w0 := x.Args[1] 9998 if w0.Op != OpARM64SRLconst { 9999 break 10000 } 10001 if w0.AuxInt != j-8 { 10002 break 10003 } 10004 if w != w0.Args[0] { 10005 break 10006 } 10007 mem := x.Args[2] 10008 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 10009 break 10010 } 10011 v.reset(OpARM64MOVHstore) 10012 v.AuxInt = i - 1 10013 v.Aux = s 10014 v.AddArg(ptr0) 10015 v.AddArg(w0) 10016 v.AddArg(mem) 10017 return true 10018 } 10019 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem)) 10020 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10021 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10022 for { 10023 if v.AuxInt != 1 { 10024 break 10025 } 10026 s := v.Aux 10027 _ = v.Args[2] 10028 v_0 := v.Args[0] 10029 if v_0.Op != OpARM64ADD { 10030 break 10031 } 10032 _ = v_0.Args[1] 10033 ptr0 := v_0.Args[0] 10034 idx0 := v_0.Args[1] 10035 v_1 := v.Args[1] 10036 if v_1.Op != OpARM64SRLconst { 10037 break 10038 } 10039 j := v_1.AuxInt 10040 w := v_1.Args[0] 10041 x := v.Args[2] 10042 if x.Op != OpARM64MOVBstoreidx { 10043 break 10044 } 10045 _ = x.Args[3] 10046 ptr1 := x.Args[0] 10047 idx1 := x.Args[1] 10048 w0 := 
x.Args[2] 10049 if w0.Op != OpARM64SRLconst { 10050 break 10051 } 10052 if w0.AuxInt != j-8 { 10053 break 10054 } 10055 if w != w0.Args[0] { 10056 break 10057 } 10058 mem := x.Args[3] 10059 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10060 break 10061 } 10062 v.reset(OpARM64MOVHstoreidx) 10063 v.AddArg(ptr1) 10064 v.AddArg(idx1) 10065 v.AddArg(w0) 10066 v.AddArg(mem) 10067 return true 10068 } 10069 return false 10070 } 10071 func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool { 10072 b := v.Block 10073 _ = b 10074 // match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem)) 10075 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x) 10076 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 10077 for { 10078 i := v.AuxInt 10079 s := v.Aux 10080 _ = v.Args[2] 10081 ptr0 := v.Args[0] 10082 v_1 := v.Args[1] 10083 if v_1.Op != OpARM64UBFX { 10084 break 10085 } 10086 bfc := v_1.AuxInt 10087 w := v_1.Args[0] 10088 x := v.Args[2] 10089 if x.Op != OpARM64MOVBstore { 10090 break 10091 } 10092 if x.AuxInt != i-1 { 10093 break 10094 } 10095 if x.Aux != s { 10096 break 10097 } 10098 _ = x.Args[2] 10099 ptr1 := x.Args[0] 10100 w0 := x.Args[1] 10101 if w0.Op != OpARM64UBFX { 10102 break 10103 } 10104 bfc2 := w0.AuxInt 10105 if w != w0.Args[0] { 10106 break 10107 } 10108 mem := x.Args[2] 10109 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) { 10110 break 10111 } 10112 v.reset(OpARM64MOVHstore) 10113 v.AuxInt = i - 1 10114 v.Aux = s 10115 v.AddArg(ptr0) 10116 v.AddArg(w0) 10117 v.AddArg(mem) 10118 return true 10119 } 10120 // 
match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem)) 10121 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x) 10122 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10123 for { 10124 if v.AuxInt != 1 { 10125 break 10126 } 10127 s := v.Aux 10128 _ = v.Args[2] 10129 v_0 := v.Args[0] 10130 if v_0.Op != OpARM64ADD { 10131 break 10132 } 10133 _ = v_0.Args[1] 10134 ptr0 := v_0.Args[0] 10135 idx0 := v_0.Args[1] 10136 v_1 := v.Args[1] 10137 if v_1.Op != OpARM64UBFX { 10138 break 10139 } 10140 bfc := v_1.AuxInt 10141 w := v_1.Args[0] 10142 x := v.Args[2] 10143 if x.Op != OpARM64MOVBstoreidx { 10144 break 10145 } 10146 _ = x.Args[3] 10147 ptr1 := x.Args[0] 10148 idx1 := x.Args[1] 10149 w0 := x.Args[2] 10150 if w0.Op != OpARM64UBFX { 10151 break 10152 } 10153 bfc2 := w0.AuxInt 10154 if w != w0.Args[0] { 10155 break 10156 } 10157 mem := x.Args[3] 10158 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) { 10159 break 10160 } 10161 v.reset(OpARM64MOVHstoreidx) 10162 v.AddArg(ptr1) 10163 v.AddArg(idx1) 10164 v.AddArg(w0) 10165 v.AddArg(mem) 10166 return true 10167 } 10168 // match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem)) 10169 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 10170 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 10171 for { 10172 i := v.AuxInt 10173 s := v.Aux 10174 _ = v.Args[2] 10175 ptr0 := v.Args[0] 10176 v_1 := v.Args[1] 10177 if v_1.Op != 
OpARM64SRLconst { 10178 break 10179 } 10180 j := v_1.AuxInt 10181 v_1_0 := v_1.Args[0] 10182 if v_1_0.Op != OpARM64MOVDreg { 10183 break 10184 } 10185 w := v_1_0.Args[0] 10186 x := v.Args[2] 10187 if x.Op != OpARM64MOVBstore { 10188 break 10189 } 10190 if x.AuxInt != i-1 { 10191 break 10192 } 10193 if x.Aux != s { 10194 break 10195 } 10196 _ = x.Args[2] 10197 ptr1 := x.Args[0] 10198 w0 := x.Args[1] 10199 if w0.Op != OpARM64SRLconst { 10200 break 10201 } 10202 if w0.AuxInt != j-8 { 10203 break 10204 } 10205 w0_0 := w0.Args[0] 10206 if w0_0.Op != OpARM64MOVDreg { 10207 break 10208 } 10209 if w != w0_0.Args[0] { 10210 break 10211 } 10212 mem := x.Args[2] 10213 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 10214 break 10215 } 10216 v.reset(OpARM64MOVHstore) 10217 v.AuxInt = i - 1 10218 v.Aux = s 10219 v.AddArg(ptr0) 10220 v.AddArg(w0) 10221 v.AddArg(mem) 10222 return true 10223 } 10224 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem)) 10225 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10226 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10227 for { 10228 if v.AuxInt != 1 { 10229 break 10230 } 10231 s := v.Aux 10232 _ = v.Args[2] 10233 v_0 := v.Args[0] 10234 if v_0.Op != OpARM64ADD { 10235 break 10236 } 10237 _ = v_0.Args[1] 10238 ptr0 := v_0.Args[0] 10239 idx0 := v_0.Args[1] 10240 v_1 := v.Args[1] 10241 if v_1.Op != OpARM64SRLconst { 10242 break 10243 } 10244 j := v_1.AuxInt 10245 v_1_0 := v_1.Args[0] 10246 if v_1_0.Op != OpARM64MOVDreg { 10247 break 10248 } 10249 w := v_1_0.Args[0] 10250 x := v.Args[2] 10251 if x.Op != OpARM64MOVBstoreidx { 10252 break 10253 } 10254 _ = x.Args[3] 10255 ptr1 := x.Args[0] 10256 idx1 := x.Args[1] 10257 w0 := x.Args[2] 10258 if w0.Op != OpARM64SRLconst { 10259 break 10260 } 10261 if w0.AuxInt != j-8 { 10262 break 10263 } 10264 w0_0 := 
w0.Args[0] 10265 if w0_0.Op != OpARM64MOVDreg { 10266 break 10267 } 10268 if w != w0_0.Args[0] { 10269 break 10270 } 10271 mem := x.Args[3] 10272 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10273 break 10274 } 10275 v.reset(OpARM64MOVHstoreidx) 10276 v.AddArg(ptr1) 10277 v.AddArg(idx1) 10278 v.AddArg(w0) 10279 v.AddArg(mem) 10280 return true 10281 } 10282 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem)))))))) 10283 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 10284 // result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem) 10285 for { 10286 i := v.AuxInt 10287 s := v.Aux 10288 _ = v.Args[2] 10289 ptr := v.Args[0] 10290 w := v.Args[1] 10291 x0 := v.Args[2] 10292 if x0.Op != OpARM64MOVBstore { 10293 break 10294 } 10295 if x0.AuxInt != i-1 { 10296 break 10297 } 10298 if x0.Aux != s { 10299 break 10300 } 10301 _ = x0.Args[2] 10302 if ptr != x0.Args[0] { 10303 break 10304 } 10305 x0_1 := x0.Args[1] 10306 if x0_1.Op != OpARM64SRLconst { 10307 break 10308 } 10309 if x0_1.AuxInt != 8 { 10310 break 10311 } 10312 if w != x0_1.Args[0] { 10313 break 10314 } 10315 x1 := x0.Args[2] 10316 if x1.Op != OpARM64MOVBstore { 10317 break 10318 } 10319 if x1.AuxInt != i-2 { 10320 break 10321 } 10322 if x1.Aux != s { 10323 break 10324 } 10325 _ = x1.Args[2] 10326 if ptr != x1.Args[0] { 10327 break 10328 } 10329 x1_1 := x1.Args[1] 10330 if x1_1.Op != OpARM64SRLconst { 10331 break 10332 } 10333 if 
x1_1.AuxInt != 16 { 10334 break 10335 } 10336 if w != x1_1.Args[0] { 10337 break 10338 } 10339 x2 := x1.Args[2] 10340 if x2.Op != OpARM64MOVBstore { 10341 break 10342 } 10343 if x2.AuxInt != i-3 { 10344 break 10345 } 10346 if x2.Aux != s { 10347 break 10348 } 10349 _ = x2.Args[2] 10350 if ptr != x2.Args[0] { 10351 break 10352 } 10353 x2_1 := x2.Args[1] 10354 if x2_1.Op != OpARM64SRLconst { 10355 break 10356 } 10357 if x2_1.AuxInt != 24 { 10358 break 10359 } 10360 if w != x2_1.Args[0] { 10361 break 10362 } 10363 x3 := x2.Args[2] 10364 if x3.Op != OpARM64MOVBstore { 10365 break 10366 } 10367 if x3.AuxInt != i-4 { 10368 break 10369 } 10370 if x3.Aux != s { 10371 break 10372 } 10373 _ = x3.Args[2] 10374 if ptr != x3.Args[0] { 10375 break 10376 } 10377 x3_1 := x3.Args[1] 10378 if x3_1.Op != OpARM64SRLconst { 10379 break 10380 } 10381 if x3_1.AuxInt != 32 { 10382 break 10383 } 10384 if w != x3_1.Args[0] { 10385 break 10386 } 10387 x4 := x3.Args[2] 10388 if x4.Op != OpARM64MOVBstore { 10389 break 10390 } 10391 if x4.AuxInt != i-5 { 10392 break 10393 } 10394 if x4.Aux != s { 10395 break 10396 } 10397 _ = x4.Args[2] 10398 if ptr != x4.Args[0] { 10399 break 10400 } 10401 x4_1 := x4.Args[1] 10402 if x4_1.Op != OpARM64SRLconst { 10403 break 10404 } 10405 if x4_1.AuxInt != 40 { 10406 break 10407 } 10408 if w != x4_1.Args[0] { 10409 break 10410 } 10411 x5 := x4.Args[2] 10412 if x5.Op != OpARM64MOVBstore { 10413 break 10414 } 10415 if x5.AuxInt != i-6 { 10416 break 10417 } 10418 if x5.Aux != s { 10419 break 10420 } 10421 _ = x5.Args[2] 10422 if ptr != x5.Args[0] { 10423 break 10424 } 10425 x5_1 := x5.Args[1] 10426 if x5_1.Op != OpARM64SRLconst { 10427 break 10428 } 10429 if x5_1.AuxInt != 48 { 10430 break 10431 } 10432 if w != x5_1.Args[0] { 10433 break 10434 } 10435 x6 := x5.Args[2] 10436 if x6.Op != OpARM64MOVBstore { 10437 break 10438 } 10439 if x6.AuxInt != i-7 { 10440 break 10441 } 10442 if x6.Aux != s { 10443 break 10444 } 10445 _ = x6.Args[2] 10446 if ptr != x6.Args[0] { 
10447 break 10448 } 10449 x6_1 := x6.Args[1] 10450 if x6_1.Op != OpARM64SRLconst { 10451 break 10452 } 10453 if x6_1.AuxInt != 56 { 10454 break 10455 } 10456 if w != x6_1.Args[0] { 10457 break 10458 } 10459 mem := x6.Args[2] 10460 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 10461 break 10462 } 10463 v.reset(OpARM64MOVDstore) 10464 v.AuxInt = i - 7 10465 v.Aux = s 10466 v.AddArg(ptr) 10467 v0 := b.NewValue0(v.Pos, OpARM64REV, w.Type) 10468 v0.AddArg(w) 10469 v.AddArg(v0) 10470 v.AddArg(mem) 10471 return true 10472 } 10473 // match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem)))))))) 10474 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 10475 // result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem) 10476 for { 10477 if v.AuxInt != 7 { 10478 break 10479 } 10480 s := v.Aux 10481 _ = v.Args[2] 10482 p := v.Args[0] 10483 w := v.Args[1] 10484 x0 := v.Args[2] 10485 if x0.Op != OpARM64MOVBstore { 10486 break 10487 } 10488 if x0.AuxInt != 6 { 10489 break 10490 } 10491 if x0.Aux != s { 10492 break 10493 } 10494 _ = x0.Args[2] 10495 if p != x0.Args[0] { 10496 break 10497 } 10498 x0_1 := x0.Args[1] 10499 if x0_1.Op != OpARM64SRLconst { 10500 break 10501 } 10502 if x0_1.AuxInt != 8 { 10503 break 10504 } 10505 
if w != x0_1.Args[0] { 10506 break 10507 } 10508 x1 := x0.Args[2] 10509 if x1.Op != OpARM64MOVBstore { 10510 break 10511 } 10512 if x1.AuxInt != 5 { 10513 break 10514 } 10515 if x1.Aux != s { 10516 break 10517 } 10518 _ = x1.Args[2] 10519 if p != x1.Args[0] { 10520 break 10521 } 10522 x1_1 := x1.Args[1] 10523 if x1_1.Op != OpARM64SRLconst { 10524 break 10525 } 10526 if x1_1.AuxInt != 16 { 10527 break 10528 } 10529 if w != x1_1.Args[0] { 10530 break 10531 } 10532 x2 := x1.Args[2] 10533 if x2.Op != OpARM64MOVBstore { 10534 break 10535 } 10536 if x2.AuxInt != 4 { 10537 break 10538 } 10539 if x2.Aux != s { 10540 break 10541 } 10542 _ = x2.Args[2] 10543 if p != x2.Args[0] { 10544 break 10545 } 10546 x2_1 := x2.Args[1] 10547 if x2_1.Op != OpARM64SRLconst { 10548 break 10549 } 10550 if x2_1.AuxInt != 24 { 10551 break 10552 } 10553 if w != x2_1.Args[0] { 10554 break 10555 } 10556 x3 := x2.Args[2] 10557 if x3.Op != OpARM64MOVBstore { 10558 break 10559 } 10560 if x3.AuxInt != 3 { 10561 break 10562 } 10563 if x3.Aux != s { 10564 break 10565 } 10566 _ = x3.Args[2] 10567 if p != x3.Args[0] { 10568 break 10569 } 10570 x3_1 := x3.Args[1] 10571 if x3_1.Op != OpARM64SRLconst { 10572 break 10573 } 10574 if x3_1.AuxInt != 32 { 10575 break 10576 } 10577 if w != x3_1.Args[0] { 10578 break 10579 } 10580 x4 := x3.Args[2] 10581 if x4.Op != OpARM64MOVBstore { 10582 break 10583 } 10584 if x4.AuxInt != 2 { 10585 break 10586 } 10587 if x4.Aux != s { 10588 break 10589 } 10590 _ = x4.Args[2] 10591 if p != x4.Args[0] { 10592 break 10593 } 10594 x4_1 := x4.Args[1] 10595 if x4_1.Op != OpARM64SRLconst { 10596 break 10597 } 10598 if x4_1.AuxInt != 40 { 10599 break 10600 } 10601 if w != x4_1.Args[0] { 10602 break 10603 } 10604 x5 := x4.Args[2] 10605 if x5.Op != OpARM64MOVBstore { 10606 break 10607 } 10608 if x5.AuxInt != 1 { 10609 break 10610 } 10611 if x5.Aux != s { 10612 break 10613 } 10614 _ = x5.Args[2] 10615 p1 := x5.Args[0] 10616 if p1.Op != OpARM64ADD { 10617 break 10618 } 10619 _ = p1.Args[1] 
10620 ptr1 := p1.Args[0] 10621 idx1 := p1.Args[1] 10622 x5_1 := x5.Args[1] 10623 if x5_1.Op != OpARM64SRLconst { 10624 break 10625 } 10626 if x5_1.AuxInt != 48 { 10627 break 10628 } 10629 if w != x5_1.Args[0] { 10630 break 10631 } 10632 x6 := x5.Args[2] 10633 if x6.Op != OpARM64MOVBstoreidx { 10634 break 10635 } 10636 _ = x6.Args[3] 10637 ptr0 := x6.Args[0] 10638 idx0 := x6.Args[1] 10639 x6_2 := x6.Args[2] 10640 if x6_2.Op != OpARM64SRLconst { 10641 break 10642 } 10643 if x6_2.AuxInt != 56 { 10644 break 10645 } 10646 if w != x6_2.Args[0] { 10647 break 10648 } 10649 mem := x6.Args[3] 10650 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 10651 break 10652 } 10653 v.reset(OpARM64MOVDstoreidx) 10654 v.AddArg(ptr0) 10655 v.AddArg(idx0) 10656 v0 := b.NewValue0(v.Pos, OpARM64REV, w.Type) 10657 v0.AddArg(w) 10658 v.AddArg(v0) 10659 v.AddArg(mem) 10660 return true 10661 } 10662 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 10663 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10664 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 10665 for { 10666 i := v.AuxInt 10667 s := v.Aux 10668 _ = v.Args[2] 10669 ptr := v.Args[0] 10670 w := v.Args[1] 10671 x0 := v.Args[2] 10672 if x0.Op != OpARM64MOVBstore { 10673 break 10674 } 10675 if x0.AuxInt != i-1 { 10676 break 10677 } 10678 if x0.Aux != s { 10679 break 10680 } 10681 _ = x0.Args[2] 10682 if ptr != x0.Args[0] { 10683 break 10684 } 10685 x0_1 := x0.Args[1] 10686 if x0_1.Op != OpARM64UBFX { 
10687 break 10688 } 10689 if x0_1.AuxInt != arm64BFAuxInt(8, 24) { 10690 break 10691 } 10692 if w != x0_1.Args[0] { 10693 break 10694 } 10695 x1 := x0.Args[2] 10696 if x1.Op != OpARM64MOVBstore { 10697 break 10698 } 10699 if x1.AuxInt != i-2 { 10700 break 10701 } 10702 if x1.Aux != s { 10703 break 10704 } 10705 _ = x1.Args[2] 10706 if ptr != x1.Args[0] { 10707 break 10708 } 10709 x1_1 := x1.Args[1] 10710 if x1_1.Op != OpARM64UBFX { 10711 break 10712 } 10713 if x1_1.AuxInt != arm64BFAuxInt(16, 16) { 10714 break 10715 } 10716 if w != x1_1.Args[0] { 10717 break 10718 } 10719 x2 := x1.Args[2] 10720 if x2.Op != OpARM64MOVBstore { 10721 break 10722 } 10723 if x2.AuxInt != i-3 { 10724 break 10725 } 10726 if x2.Aux != s { 10727 break 10728 } 10729 _ = x2.Args[2] 10730 if ptr != x2.Args[0] { 10731 break 10732 } 10733 x2_1 := x2.Args[1] 10734 if x2_1.Op != OpARM64UBFX { 10735 break 10736 } 10737 if x2_1.AuxInt != arm64BFAuxInt(24, 8) { 10738 break 10739 } 10740 if w != x2_1.Args[0] { 10741 break 10742 } 10743 mem := x2.Args[2] 10744 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 10745 break 10746 } 10747 v.reset(OpARM64MOVWstore) 10748 v.AuxInt = i - 3 10749 v.Aux = s 10750 v.AddArg(ptr) 10751 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 10752 v0.AddArg(w) 10753 v.AddArg(v0) 10754 v.AddArg(mem) 10755 return true 10756 } 10757 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 10758 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 10759 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 10760 for { 10761 if v.AuxInt != 3 { 10762 break 10763 } 10764 s := v.Aux 
10765 _ = v.Args[2] 10766 p := v.Args[0] 10767 w := v.Args[1] 10768 x0 := v.Args[2] 10769 if x0.Op != OpARM64MOVBstore { 10770 break 10771 } 10772 if x0.AuxInt != 2 { 10773 break 10774 } 10775 if x0.Aux != s { 10776 break 10777 } 10778 _ = x0.Args[2] 10779 if p != x0.Args[0] { 10780 break 10781 } 10782 x0_1 := x0.Args[1] 10783 if x0_1.Op != OpARM64UBFX { 10784 break 10785 } 10786 if x0_1.AuxInt != arm64BFAuxInt(8, 24) { 10787 break 10788 } 10789 if w != x0_1.Args[0] { 10790 break 10791 } 10792 x1 := x0.Args[2] 10793 if x1.Op != OpARM64MOVBstore { 10794 break 10795 } 10796 if x1.AuxInt != 1 { 10797 break 10798 } 10799 if x1.Aux != s { 10800 break 10801 } 10802 _ = x1.Args[2] 10803 p1 := x1.Args[0] 10804 if p1.Op != OpARM64ADD { 10805 break 10806 } 10807 _ = p1.Args[1] 10808 ptr1 := p1.Args[0] 10809 idx1 := p1.Args[1] 10810 x1_1 := x1.Args[1] 10811 if x1_1.Op != OpARM64UBFX { 10812 break 10813 } 10814 if x1_1.AuxInt != arm64BFAuxInt(16, 16) { 10815 break 10816 } 10817 if w != x1_1.Args[0] { 10818 break 10819 } 10820 x2 := x1.Args[2] 10821 if x2.Op != OpARM64MOVBstoreidx { 10822 break 10823 } 10824 _ = x2.Args[3] 10825 ptr0 := x2.Args[0] 10826 idx0 := x2.Args[1] 10827 x2_2 := x2.Args[2] 10828 if x2_2.Op != OpARM64UBFX { 10829 break 10830 } 10831 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 10832 break 10833 } 10834 if w != x2_2.Args[0] { 10835 break 10836 } 10837 mem := x2.Args[3] 10838 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 10839 break 10840 } 10841 v.reset(OpARM64MOVWstoreidx) 10842 v.AddArg(ptr0) 10843 v.AddArg(idx0) 10844 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 10845 v0.AddArg(w) 10846 v.AddArg(v0) 10847 v.AddArg(mem) 10848 return true 10849 } 10850 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr 
(SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem)))) 10851 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10852 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 10853 for { 10854 i := v.AuxInt 10855 s := v.Aux 10856 _ = v.Args[2] 10857 ptr := v.Args[0] 10858 w := v.Args[1] 10859 x0 := v.Args[2] 10860 if x0.Op != OpARM64MOVBstore { 10861 break 10862 } 10863 if x0.AuxInt != i-1 { 10864 break 10865 } 10866 if x0.Aux != s { 10867 break 10868 } 10869 _ = x0.Args[2] 10870 if ptr != x0.Args[0] { 10871 break 10872 } 10873 x0_1 := x0.Args[1] 10874 if x0_1.Op != OpARM64SRLconst { 10875 break 10876 } 10877 if x0_1.AuxInt != 8 { 10878 break 10879 } 10880 x0_1_0 := x0_1.Args[0] 10881 if x0_1_0.Op != OpARM64MOVDreg { 10882 break 10883 } 10884 if w != x0_1_0.Args[0] { 10885 break 10886 } 10887 x1 := x0.Args[2] 10888 if x1.Op != OpARM64MOVBstore { 10889 break 10890 } 10891 if x1.AuxInt != i-2 { 10892 break 10893 } 10894 if x1.Aux != s { 10895 break 10896 } 10897 _ = x1.Args[2] 10898 if ptr != x1.Args[0] { 10899 break 10900 } 10901 x1_1 := x1.Args[1] 10902 if x1_1.Op != OpARM64SRLconst { 10903 break 10904 } 10905 if x1_1.AuxInt != 16 { 10906 break 10907 } 10908 x1_1_0 := x1_1.Args[0] 10909 if x1_1_0.Op != OpARM64MOVDreg { 10910 break 10911 } 10912 if w != x1_1_0.Args[0] { 10913 break 10914 } 10915 x2 := x1.Args[2] 10916 if x2.Op != OpARM64MOVBstore { 10917 break 10918 } 10919 if x2.AuxInt != i-3 { 10920 break 10921 } 10922 if x2.Aux != s { 10923 break 10924 } 10925 _ = x2.Args[2] 10926 if ptr != x2.Args[0] { 10927 break 10928 } 10929 x2_1 := x2.Args[1] 10930 if x2_1.Op != OpARM64SRLconst { 10931 break 10932 } 10933 if x2_1.AuxInt != 24 { 10934 break 10935 } 10936 x2_1_0 := x2_1.Args[0] 10937 if x2_1_0.Op != OpARM64MOVDreg { 10938 break 10939 } 10940 if w != x2_1_0.Args[0] { 10941 break 10942 } 10943 mem := x2.Args[2] 10944 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && 
clobber(x0) && clobber(x1) && clobber(x2)) { 10945 break 10946 } 10947 v.reset(OpARM64MOVWstore) 10948 v.AuxInt = i - 3 10949 v.Aux = s 10950 v.AddArg(ptr) 10951 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 10952 v0.AddArg(w) 10953 v.AddArg(v0) 10954 v.AddArg(mem) 10955 return true 10956 } 10957 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem)))) 10958 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 10959 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 10960 for { 10961 if v.AuxInt != 3 { 10962 break 10963 } 10964 s := v.Aux 10965 _ = v.Args[2] 10966 p := v.Args[0] 10967 w := v.Args[1] 10968 x0 := v.Args[2] 10969 if x0.Op != OpARM64MOVBstore { 10970 break 10971 } 10972 if x0.AuxInt != 2 { 10973 break 10974 } 10975 if x0.Aux != s { 10976 break 10977 } 10978 _ = x0.Args[2] 10979 if p != x0.Args[0] { 10980 break 10981 } 10982 x0_1 := x0.Args[1] 10983 if x0_1.Op != OpARM64SRLconst { 10984 break 10985 } 10986 if x0_1.AuxInt != 8 { 10987 break 10988 } 10989 x0_1_0 := x0_1.Args[0] 10990 if x0_1_0.Op != OpARM64MOVDreg { 10991 break 10992 } 10993 if w != x0_1_0.Args[0] { 10994 break 10995 } 10996 x1 := x0.Args[2] 10997 if x1.Op != OpARM64MOVBstore { 10998 break 10999 } 11000 if x1.AuxInt != 1 { 11001 break 11002 } 11003 if x1.Aux != s { 11004 break 11005 } 11006 _ = x1.Args[2] 11007 p1 := x1.Args[0] 11008 if p1.Op != OpARM64ADD { 11009 break 11010 } 11011 _ = p1.Args[1] 11012 ptr1 := p1.Args[0] 11013 idx1 := p1.Args[1] 11014 x1_1 := x1.Args[1] 11015 if x1_1.Op != OpARM64SRLconst { 11016 break 11017 } 11018 if x1_1.AuxInt != 16 { 11019 break 11020 } 11021 x1_1_0 := x1_1.Args[0] 11022 if x1_1_0.Op != OpARM64MOVDreg { 11023 
break 11024 } 11025 if w != x1_1_0.Args[0] { 11026 break 11027 } 11028 x2 := x1.Args[2] 11029 if x2.Op != OpARM64MOVBstoreidx { 11030 break 11031 } 11032 _ = x2.Args[3] 11033 ptr0 := x2.Args[0] 11034 idx0 := x2.Args[1] 11035 x2_2 := x2.Args[2] 11036 if x2_2.Op != OpARM64SRLconst { 11037 break 11038 } 11039 if x2_2.AuxInt != 24 { 11040 break 11041 } 11042 x2_2_0 := x2_2.Args[0] 11043 if x2_2_0.Op != OpARM64MOVDreg { 11044 break 11045 } 11046 if w != x2_2_0.Args[0] { 11047 break 11048 } 11049 mem := x2.Args[3] 11050 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 11051 break 11052 } 11053 v.reset(OpARM64MOVWstoreidx) 11054 v.AddArg(ptr0) 11055 v.AddArg(idx0) 11056 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 11057 v0.AddArg(w) 11058 v.AddArg(v0) 11059 v.AddArg(mem) 11060 return true 11061 } 11062 return false 11063 } 11064 func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool { 11065 b := v.Block 11066 _ = b 11067 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem)))) 11068 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 11069 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 11070 for { 11071 i := v.AuxInt 11072 s := v.Aux 11073 _ = v.Args[2] 11074 ptr := v.Args[0] 11075 w := v.Args[1] 11076 x0 := v.Args[2] 11077 if x0.Op != OpARM64MOVBstore { 11078 break 11079 } 11080 if x0.AuxInt != i-1 { 11081 break 11082 } 11083 if x0.Aux != s { 11084 break 11085 } 11086 _ = x0.Args[2] 11087 if ptr != x0.Args[0] { 11088 break 11089 } 11090 x0_1 := x0.Args[1] 11091 if x0_1.Op != OpARM64SRLconst { 11092 break 11093 } 11094 if x0_1.AuxInt != 8 { 11095 break 11096 } 11097 if w != x0_1.Args[0] { 11098 break 11099 } 
11100 x1 := x0.Args[2] 11101 if x1.Op != OpARM64MOVBstore { 11102 break 11103 } 11104 if x1.AuxInt != i-2 { 11105 break 11106 } 11107 if x1.Aux != s { 11108 break 11109 } 11110 _ = x1.Args[2] 11111 if ptr != x1.Args[0] { 11112 break 11113 } 11114 x1_1 := x1.Args[1] 11115 if x1_1.Op != OpARM64SRLconst { 11116 break 11117 } 11118 if x1_1.AuxInt != 16 { 11119 break 11120 } 11121 if w != x1_1.Args[0] { 11122 break 11123 } 11124 x2 := x1.Args[2] 11125 if x2.Op != OpARM64MOVBstore { 11126 break 11127 } 11128 if x2.AuxInt != i-3 { 11129 break 11130 } 11131 if x2.Aux != s { 11132 break 11133 } 11134 _ = x2.Args[2] 11135 if ptr != x2.Args[0] { 11136 break 11137 } 11138 x2_1 := x2.Args[1] 11139 if x2_1.Op != OpARM64SRLconst { 11140 break 11141 } 11142 if x2_1.AuxInt != 24 { 11143 break 11144 } 11145 if w != x2_1.Args[0] { 11146 break 11147 } 11148 mem := x2.Args[2] 11149 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 11150 break 11151 } 11152 v.reset(OpARM64MOVWstore) 11153 v.AuxInt = i - 3 11154 v.Aux = s 11155 v.AddArg(ptr) 11156 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 11157 v0.AddArg(w) 11158 v.AddArg(v0) 11159 v.AddArg(mem) 11160 return true 11161 } 11162 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem)))) 11163 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 11164 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 11165 for { 11166 if v.AuxInt != 3 { 11167 break 11168 } 11169 s := v.Aux 11170 _ = v.Args[2] 11171 p := v.Args[0] 11172 w := v.Args[1] 11173 x0 := v.Args[2] 11174 if x0.Op != OpARM64MOVBstore { 11175 break 11176 } 11177 if x0.AuxInt != 2 { 11178 break 11179 } 11180 if x0.Aux != s { 
11181 break 11182 } 11183 _ = x0.Args[2] 11184 if p != x0.Args[0] { 11185 break 11186 } 11187 x0_1 := x0.Args[1] 11188 if x0_1.Op != OpARM64SRLconst { 11189 break 11190 } 11191 if x0_1.AuxInt != 8 { 11192 break 11193 } 11194 if w != x0_1.Args[0] { 11195 break 11196 } 11197 x1 := x0.Args[2] 11198 if x1.Op != OpARM64MOVBstore { 11199 break 11200 } 11201 if x1.AuxInt != 1 { 11202 break 11203 } 11204 if x1.Aux != s { 11205 break 11206 } 11207 _ = x1.Args[2] 11208 p1 := x1.Args[0] 11209 if p1.Op != OpARM64ADD { 11210 break 11211 } 11212 _ = p1.Args[1] 11213 ptr1 := p1.Args[0] 11214 idx1 := p1.Args[1] 11215 x1_1 := x1.Args[1] 11216 if x1_1.Op != OpARM64SRLconst { 11217 break 11218 } 11219 if x1_1.AuxInt != 16 { 11220 break 11221 } 11222 if w != x1_1.Args[0] { 11223 break 11224 } 11225 x2 := x1.Args[2] 11226 if x2.Op != OpARM64MOVBstoreidx { 11227 break 11228 } 11229 _ = x2.Args[3] 11230 ptr0 := x2.Args[0] 11231 idx0 := x2.Args[1] 11232 x2_2 := x2.Args[2] 11233 if x2_2.Op != OpARM64SRLconst { 11234 break 11235 } 11236 if x2_2.AuxInt != 24 { 11237 break 11238 } 11239 if w != x2_2.Args[0] { 11240 break 11241 } 11242 mem := x2.Args[3] 11243 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 11244 break 11245 } 11246 v.reset(OpARM64MOVWstoreidx) 11247 v.AddArg(ptr0) 11248 v.AddArg(idx0) 11249 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 11250 v0.AddArg(w) 11251 v.AddArg(v0) 11252 v.AddArg(mem) 11253 return true 11254 } 11255 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem)) 11256 // cond: x.Uses == 1 && clobber(x) 11257 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11258 for { 11259 i := v.AuxInt 11260 s := v.Aux 11261 _ = v.Args[2] 11262 ptr := v.Args[0] 11263 w := v.Args[1] 11264 x := v.Args[2] 11265 if x.Op != OpARM64MOVBstore { 11266 break 
11267 } 11268 if x.AuxInt != i-1 { 11269 break 11270 } 11271 if x.Aux != s { 11272 break 11273 } 11274 _ = x.Args[2] 11275 if ptr != x.Args[0] { 11276 break 11277 } 11278 x_1 := x.Args[1] 11279 if x_1.Op != OpARM64SRLconst { 11280 break 11281 } 11282 if x_1.AuxInt != 8 { 11283 break 11284 } 11285 if w != x_1.Args[0] { 11286 break 11287 } 11288 mem := x.Args[2] 11289 if !(x.Uses == 1 && clobber(x)) { 11290 break 11291 } 11292 v.reset(OpARM64MOVHstore) 11293 v.AuxInt = i - 1 11294 v.Aux = s 11295 v.AddArg(ptr) 11296 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11297 v0.AddArg(w) 11298 v.AddArg(v0) 11299 v.AddArg(mem) 11300 return true 11301 } 11302 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem)) 11303 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11304 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11305 for { 11306 if v.AuxInt != 1 { 11307 break 11308 } 11309 s := v.Aux 11310 _ = v.Args[2] 11311 v_0 := v.Args[0] 11312 if v_0.Op != OpARM64ADD { 11313 break 11314 } 11315 _ = v_0.Args[1] 11316 ptr1 := v_0.Args[0] 11317 idx1 := v_0.Args[1] 11318 w := v.Args[1] 11319 x := v.Args[2] 11320 if x.Op != OpARM64MOVBstoreidx { 11321 break 11322 } 11323 _ = x.Args[3] 11324 ptr0 := x.Args[0] 11325 idx0 := x.Args[1] 11326 x_2 := x.Args[2] 11327 if x_2.Op != OpARM64SRLconst { 11328 break 11329 } 11330 if x_2.AuxInt != 8 { 11331 break 11332 } 11333 if w != x_2.Args[0] { 11334 break 11335 } 11336 mem := x.Args[3] 11337 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11338 break 11339 } 11340 v.reset(OpARM64MOVHstoreidx) 11341 v.AddArg(ptr0) 11342 v.AddArg(idx0) 11343 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11344 v0.AddArg(w) 11345 v.AddArg(v0) 11346 v.AddArg(mem) 11347 return true 11348 } 11349 // match: 
(MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 11350 // cond: x.Uses == 1 && clobber(x) 11351 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11352 for { 11353 i := v.AuxInt 11354 s := v.Aux 11355 _ = v.Args[2] 11356 ptr := v.Args[0] 11357 w := v.Args[1] 11358 x := v.Args[2] 11359 if x.Op != OpARM64MOVBstore { 11360 break 11361 } 11362 if x.AuxInt != i-1 { 11363 break 11364 } 11365 if x.Aux != s { 11366 break 11367 } 11368 _ = x.Args[2] 11369 if ptr != x.Args[0] { 11370 break 11371 } 11372 x_1 := x.Args[1] 11373 if x_1.Op != OpARM64UBFX { 11374 break 11375 } 11376 if x_1.AuxInt != arm64BFAuxInt(8, 8) { 11377 break 11378 } 11379 if w != x_1.Args[0] { 11380 break 11381 } 11382 mem := x.Args[2] 11383 if !(x.Uses == 1 && clobber(x)) { 11384 break 11385 } 11386 v.reset(OpARM64MOVHstore) 11387 v.AuxInt = i - 1 11388 v.Aux = s 11389 v.AddArg(ptr) 11390 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11391 v0.AddArg(w) 11392 v.AddArg(v0) 11393 v.AddArg(mem) 11394 return true 11395 } 11396 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 11397 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11398 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11399 for { 11400 if v.AuxInt != 1 { 11401 break 11402 } 11403 s := v.Aux 11404 _ = v.Args[2] 11405 v_0 := v.Args[0] 11406 if v_0.Op != OpARM64ADD { 11407 break 11408 } 11409 _ = v_0.Args[1] 11410 ptr1 := v_0.Args[0] 11411 idx1 := v_0.Args[1] 11412 w := v.Args[1] 11413 x := v.Args[2] 11414 if x.Op != OpARM64MOVBstoreidx { 11415 break 11416 } 11417 _ = x.Args[3] 11418 ptr0 := x.Args[0] 11419 idx0 := x.Args[1] 11420 x_2 := x.Args[2] 11421 if x_2.Op != OpARM64UBFX { 11422 break 11423 } 11424 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 11425 break 11426 } 11427 if w != x_2.Args[0] { 11428 break 11429 } 11430 mem := 
x.Args[3] 11431 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11432 break 11433 } 11434 v.reset(OpARM64MOVHstoreidx) 11435 v.AddArg(ptr0) 11436 v.AddArg(idx0) 11437 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11438 v0.AddArg(w) 11439 v.AddArg(v0) 11440 v.AddArg(mem) 11441 return true 11442 } 11443 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 11444 // cond: x.Uses == 1 && clobber(x) 11445 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11446 for { 11447 i := v.AuxInt 11448 s := v.Aux 11449 _ = v.Args[2] 11450 ptr := v.Args[0] 11451 w := v.Args[1] 11452 x := v.Args[2] 11453 if x.Op != OpARM64MOVBstore { 11454 break 11455 } 11456 if x.AuxInt != i-1 { 11457 break 11458 } 11459 if x.Aux != s { 11460 break 11461 } 11462 _ = x.Args[2] 11463 if ptr != x.Args[0] { 11464 break 11465 } 11466 x_1 := x.Args[1] 11467 if x_1.Op != OpARM64SRLconst { 11468 break 11469 } 11470 if x_1.AuxInt != 8 { 11471 break 11472 } 11473 x_1_0 := x_1.Args[0] 11474 if x_1_0.Op != OpARM64MOVDreg { 11475 break 11476 } 11477 if w != x_1_0.Args[0] { 11478 break 11479 } 11480 mem := x.Args[2] 11481 if !(x.Uses == 1 && clobber(x)) { 11482 break 11483 } 11484 v.reset(OpARM64MOVHstore) 11485 v.AuxInt = i - 1 11486 v.Aux = s 11487 v.AddArg(ptr) 11488 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11489 v0.AddArg(w) 11490 v.AddArg(v0) 11491 v.AddArg(mem) 11492 return true 11493 } 11494 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 11495 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11496 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11497 for { 11498 if v.AuxInt != 1 { 11499 break 11500 } 11501 s := v.Aux 11502 _ = v.Args[2] 11503 v_0 := v.Args[0] 11504 if v_0.Op != 
OpARM64ADD { 11505 break 11506 } 11507 _ = v_0.Args[1] 11508 ptr1 := v_0.Args[0] 11509 idx1 := v_0.Args[1] 11510 w := v.Args[1] 11511 x := v.Args[2] 11512 if x.Op != OpARM64MOVBstoreidx { 11513 break 11514 } 11515 _ = x.Args[3] 11516 ptr0 := x.Args[0] 11517 idx0 := x.Args[1] 11518 x_2 := x.Args[2] 11519 if x_2.Op != OpARM64SRLconst { 11520 break 11521 } 11522 if x_2.AuxInt != 8 { 11523 break 11524 } 11525 x_2_0 := x_2.Args[0] 11526 if x_2_0.Op != OpARM64MOVDreg { 11527 break 11528 } 11529 if w != x_2_0.Args[0] { 11530 break 11531 } 11532 mem := x.Args[3] 11533 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11534 break 11535 } 11536 v.reset(OpARM64MOVHstoreidx) 11537 v.AddArg(ptr0) 11538 v.AddArg(idx0) 11539 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11540 v0.AddArg(w) 11541 v.AddArg(v0) 11542 v.AddArg(mem) 11543 return true 11544 } 11545 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) mem)) 11546 // cond: x.Uses == 1 && clobber(x) 11547 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11548 for { 11549 i := v.AuxInt 11550 s := v.Aux 11551 _ = v.Args[2] 11552 ptr := v.Args[0] 11553 w := v.Args[1] 11554 x := v.Args[2] 11555 if x.Op != OpARM64MOVBstore { 11556 break 11557 } 11558 if x.AuxInt != i-1 { 11559 break 11560 } 11561 if x.Aux != s { 11562 break 11563 } 11564 _ = x.Args[2] 11565 if ptr != x.Args[0] { 11566 break 11567 } 11568 x_1 := x.Args[1] 11569 if x_1.Op != OpARM64UBFX { 11570 break 11571 } 11572 if x_1.AuxInt != arm64BFAuxInt(8, 24) { 11573 break 11574 } 11575 if w != x_1.Args[0] { 11576 break 11577 } 11578 mem := x.Args[2] 11579 if !(x.Uses == 1 && clobber(x)) { 11580 break 11581 } 11582 v.reset(OpARM64MOVHstore) 11583 v.AuxInt = i - 1 11584 v.Aux = s 11585 v.AddArg(ptr) 11586 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11587 v0.AddArg(w) 11588 v.AddArg(v0) 11589 v.AddArg(mem) 11590 return 
true 11591 } 11592 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 24)] w) mem)) 11593 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11594 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11595 for { 11596 if v.AuxInt != 1 { 11597 break 11598 } 11599 s := v.Aux 11600 _ = v.Args[2] 11601 v_0 := v.Args[0] 11602 if v_0.Op != OpARM64ADD { 11603 break 11604 } 11605 _ = v_0.Args[1] 11606 ptr1 := v_0.Args[0] 11607 idx1 := v_0.Args[1] 11608 w := v.Args[1] 11609 x := v.Args[2] 11610 if x.Op != OpARM64MOVBstoreidx { 11611 break 11612 } 11613 _ = x.Args[3] 11614 ptr0 := x.Args[0] 11615 idx0 := x.Args[1] 11616 x_2 := x.Args[2] 11617 if x_2.Op != OpARM64UBFX { 11618 break 11619 } 11620 if x_2.AuxInt != arm64BFAuxInt(8, 24) { 11621 break 11622 } 11623 if w != x_2.Args[0] { 11624 break 11625 } 11626 mem := x.Args[3] 11627 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11628 break 11629 } 11630 v.reset(OpARM64MOVHstoreidx) 11631 v.AddArg(ptr0) 11632 v.AddArg(idx0) 11633 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11634 v0.AddArg(w) 11635 v.AddArg(v0) 11636 v.AddArg(mem) 11637 return true 11638 } 11639 return false 11640 } 11641 func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool { 11642 b := v.Block 11643 _ = b 11644 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 11645 // cond: x.Uses == 1 && clobber(x) 11646 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11647 for { 11648 i := v.AuxInt 11649 s := v.Aux 11650 _ = v.Args[2] 11651 ptr := v.Args[0] 11652 w := v.Args[1] 11653 x := v.Args[2] 11654 if x.Op != OpARM64MOVBstore { 11655 break 11656 } 11657 if x.AuxInt != i-1 { 11658 break 11659 } 11660 if x.Aux != s { 11661 break 11662 } 11663 _ = x.Args[2] 
11664 if ptr != x.Args[0] { 11665 break 11666 } 11667 x_1 := x.Args[1] 11668 if x_1.Op != OpARM64SRLconst { 11669 break 11670 } 11671 if x_1.AuxInt != 8 { 11672 break 11673 } 11674 x_1_0 := x_1.Args[0] 11675 if x_1_0.Op != OpARM64MOVDreg { 11676 break 11677 } 11678 if w != x_1_0.Args[0] { 11679 break 11680 } 11681 mem := x.Args[2] 11682 if !(x.Uses == 1 && clobber(x)) { 11683 break 11684 } 11685 v.reset(OpARM64MOVHstore) 11686 v.AuxInt = i - 1 11687 v.Aux = s 11688 v.AddArg(ptr) 11689 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11690 v0.AddArg(w) 11691 v.AddArg(v0) 11692 v.AddArg(mem) 11693 return true 11694 } 11695 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 11696 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11697 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11698 for { 11699 if v.AuxInt != 1 { 11700 break 11701 } 11702 s := v.Aux 11703 _ = v.Args[2] 11704 v_0 := v.Args[0] 11705 if v_0.Op != OpARM64ADD { 11706 break 11707 } 11708 _ = v_0.Args[1] 11709 ptr1 := v_0.Args[0] 11710 idx1 := v_0.Args[1] 11711 w := v.Args[1] 11712 x := v.Args[2] 11713 if x.Op != OpARM64MOVBstoreidx { 11714 break 11715 } 11716 _ = x.Args[3] 11717 ptr0 := x.Args[0] 11718 idx0 := x.Args[1] 11719 x_2 := x.Args[2] 11720 if x_2.Op != OpARM64SRLconst { 11721 break 11722 } 11723 if x_2.AuxInt != 8 { 11724 break 11725 } 11726 x_2_0 := x_2.Args[0] 11727 if x_2_0.Op != OpARM64MOVDreg { 11728 break 11729 } 11730 if w != x_2_0.Args[0] { 11731 break 11732 } 11733 mem := x.Args[3] 11734 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11735 break 11736 } 11737 v.reset(OpARM64MOVHstoreidx) 11738 v.AddArg(ptr0) 11739 v.AddArg(idx0) 11740 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11741 v0.AddArg(w) 11742 v.AddArg(v0) 
11743 v.AddArg(mem) 11744 return true 11745 } 11746 return false 11747 }
// rewriteValueARM64_OpARM64MOVBstoreidx_0 applies the first batch of rewrite
// rules for MOVBstoreidx: a constant index folds into a MOVBstore offset, a
// stored constant zero becomes MOVBstorezeroidx, a sign/zero extension of the
// stored value is dropped (a byte store only keeps the low 8 bits), and an
// adjacent pair of byte stores of w and w>>8 merges into one MOVHstoreidx.
// It reports whether a rewrite was performed.
func rewriteValueARM64_OpARM64MOVBstoreidx_0(v *Value) bool {
	_ = v.Args[3]
	// match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
	// result: (MOVBstore [c] ptr val mem)
	if a1 := v.Args[1]; a1.Op == OpARM64MOVDconst {
		c := a1.AuxInt
		ptr, val, mem := v.Args[0], v.Args[2], v.Args[3]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstoreidx (MOVDconst [c]) idx val mem)
	// result: (MOVBstore [c] idx val mem)
	if a0 := v.Args[0]; a0.Op == OpARM64MOVDconst {
		c := a0.AuxInt
		idx, val, mem := v.Args[1], v.Args[2], v.Args[3]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// The next seven rules all key on the operand being stored (v.Args[2]).
	ptr, idx, stored := v.Args[0], v.Args[1], v.Args[2]
	switch stored.Op {
	case OpARM64MOVDconst:
		// match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem)
		// result: (MOVBstorezeroidx ptr idx mem)
		if stored.AuxInt == 0 {
			mem := v.Args[3]
			v.reset(OpARM64MOVBstorezeroidx)
			v.AddArg(ptr)
			v.AddArg(idx)
			v.AddArg(mem)
			return true
		}
	case OpARM64MOVBreg, OpARM64MOVBUreg, OpARM64MOVHreg,
		OpARM64MOVHUreg, OpARM64MOVWreg, OpARM64MOVWUreg:
		// match: (MOVBstoreidx ptr idx (MOV{B,BU,H,HU,W,WU}reg x) mem)
		// result: (MOVBstoreidx ptr idx x mem)
		// The truncating byte store makes the extension redundant.
		inner := stored.Args[0]
		mem := v.Args[3]
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(inner)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVHstoreidx ptr idx w mem)
	for {
		a1 := v.Args[1]
		if a1.Op != OpARM64ADDconst || a1.AuxInt != 1 {
			break
		}
		base := a1.Args[0]
		sh := v.Args[2]
		if sh.Op != OpARM64SRLconst || sh.AuxInt != 8 {
			break
		}
		w := sh.Args[0]
		x := v.Args[3]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		if ptr != x.Args[0] || base != x.Args[1] || w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr)
		v.AddArg(base)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	return false
}
11979 func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool { 11980 b := v.Block 11981 _ = b 11982 // match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 11983 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 11984 // result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem) 11985 for { 11986 _ = v.Args[3] 11987 ptr := v.Args[0] 11988 v_1 := v.Args[1] 11989 if v_1.Op != OpARM64ADDconst { 11990 break 11991
} 11992 if v_1.AuxInt != 3 { 11993 break 11994 } 11995 idx := v_1.Args[0] 11996 w := v.Args[2] 11997 x0 := v.Args[3] 11998 if x0.Op != OpARM64MOVBstoreidx { 11999 break 12000 } 12001 _ = x0.Args[3] 12002 if ptr != x0.Args[0] { 12003 break 12004 } 12005 x0_1 := x0.Args[1] 12006 if x0_1.Op != OpARM64ADDconst { 12007 break 12008 } 12009 if x0_1.AuxInt != 2 { 12010 break 12011 } 12012 if idx != x0_1.Args[0] { 12013 break 12014 } 12015 x0_2 := x0.Args[2] 12016 if x0_2.Op != OpARM64UBFX { 12017 break 12018 } 12019 if x0_2.AuxInt != arm64BFAuxInt(8, 24) { 12020 break 12021 } 12022 if w != x0_2.Args[0] { 12023 break 12024 } 12025 x1 := x0.Args[3] 12026 if x1.Op != OpARM64MOVBstoreidx { 12027 break 12028 } 12029 _ = x1.Args[3] 12030 if ptr != x1.Args[0] { 12031 break 12032 } 12033 x1_1 := x1.Args[1] 12034 if x1_1.Op != OpARM64ADDconst { 12035 break 12036 } 12037 if x1_1.AuxInt != 1 { 12038 break 12039 } 12040 if idx != x1_1.Args[0] { 12041 break 12042 } 12043 x1_2 := x1.Args[2] 12044 if x1_2.Op != OpARM64UBFX { 12045 break 12046 } 12047 if x1_2.AuxInt != arm64BFAuxInt(16, 16) { 12048 break 12049 } 12050 if w != x1_2.Args[0] { 12051 break 12052 } 12053 x2 := x1.Args[3] 12054 if x2.Op != OpARM64MOVBstoreidx { 12055 break 12056 } 12057 _ = x2.Args[3] 12058 if ptr != x2.Args[0] { 12059 break 12060 } 12061 if idx != x2.Args[1] { 12062 break 12063 } 12064 x2_2 := x2.Args[2] 12065 if x2_2.Op != OpARM64UBFX { 12066 break 12067 } 12068 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 12069 break 12070 } 12071 if w != x2_2.Args[0] { 12072 break 12073 } 12074 mem := x2.Args[3] 12075 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 12076 break 12077 } 12078 v.reset(OpARM64MOVWstoreidx) 12079 v.AddArg(ptr) 12080 v.AddArg(idx) 12081 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 12082 v0.AddArg(w) 12083 v.AddArg(v0) 12084 v.AddArg(mem) 12085 return true 12086 } 12087 // match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX 
[arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 12088 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 12089 // result: (MOVWstoreidx ptr idx w mem) 12090 for { 12091 _ = v.Args[3] 12092 ptr := v.Args[0] 12093 idx := v.Args[1] 12094 w := v.Args[2] 12095 x0 := v.Args[3] 12096 if x0.Op != OpARM64MOVBstoreidx { 12097 break 12098 } 12099 _ = x0.Args[3] 12100 if ptr != x0.Args[0] { 12101 break 12102 } 12103 x0_1 := x0.Args[1] 12104 if x0_1.Op != OpARM64ADDconst { 12105 break 12106 } 12107 if x0_1.AuxInt != 1 { 12108 break 12109 } 12110 if idx != x0_1.Args[0] { 12111 break 12112 } 12113 x0_2 := x0.Args[2] 12114 if x0_2.Op != OpARM64UBFX { 12115 break 12116 } 12117 if x0_2.AuxInt != arm64BFAuxInt(8, 24) { 12118 break 12119 } 12120 if w != x0_2.Args[0] { 12121 break 12122 } 12123 x1 := x0.Args[3] 12124 if x1.Op != OpARM64MOVBstoreidx { 12125 break 12126 } 12127 _ = x1.Args[3] 12128 if ptr != x1.Args[0] { 12129 break 12130 } 12131 x1_1 := x1.Args[1] 12132 if x1_1.Op != OpARM64ADDconst { 12133 break 12134 } 12135 if x1_1.AuxInt != 2 { 12136 break 12137 } 12138 if idx != x1_1.Args[0] { 12139 break 12140 } 12141 x1_2 := x1.Args[2] 12142 if x1_2.Op != OpARM64UBFX { 12143 break 12144 } 12145 if x1_2.AuxInt != arm64BFAuxInt(16, 16) { 12146 break 12147 } 12148 if w != x1_2.Args[0] { 12149 break 12150 } 12151 x2 := x1.Args[3] 12152 if x2.Op != OpARM64MOVBstoreidx { 12153 break 12154 } 12155 _ = x2.Args[3] 12156 if ptr != x2.Args[0] { 12157 break 12158 } 12159 x2_1 := x2.Args[1] 12160 if x2_1.Op != OpARM64ADDconst { 12161 break 12162 } 12163 if x2_1.AuxInt != 3 { 12164 break 12165 } 12166 if idx != x2_1.Args[0] { 12167 break 12168 } 12169 x2_2 := x2.Args[2] 12170 if x2_2.Op != OpARM64UBFX { 12171 break 12172 } 12173 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 12174 break 12175 } 12176 if w != x2_2.Args[0] { 
12177 break 12178 } 12179 mem := x2.Args[3] 12180 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 12181 break 12182 } 12183 v.reset(OpARM64MOVWstoreidx) 12184 v.AddArg(ptr) 12185 v.AddArg(idx) 12186 v.AddArg(w) 12187 v.AddArg(mem) 12188 return true 12189 } 12190 // match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 12191 // cond: x.Uses == 1 && clobber(x) 12192 // result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem) 12193 for { 12194 _ = v.Args[3] 12195 ptr := v.Args[0] 12196 v_1 := v.Args[1] 12197 if v_1.Op != OpARM64ADDconst { 12198 break 12199 } 12200 if v_1.AuxInt != 1 { 12201 break 12202 } 12203 idx := v_1.Args[0] 12204 w := v.Args[2] 12205 x := v.Args[3] 12206 if x.Op != OpARM64MOVBstoreidx { 12207 break 12208 } 12209 _ = x.Args[3] 12210 if ptr != x.Args[0] { 12211 break 12212 } 12213 if idx != x.Args[1] { 12214 break 12215 } 12216 x_2 := x.Args[2] 12217 if x_2.Op != OpARM64UBFX { 12218 break 12219 } 12220 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 12221 break 12222 } 12223 if w != x_2.Args[0] { 12224 break 12225 } 12226 mem := x.Args[3] 12227 if !(x.Uses == 1 && clobber(x)) { 12228 break 12229 } 12230 v.reset(OpARM64MOVHstoreidx) 12231 v.AddArg(ptr) 12232 v.AddArg(idx) 12233 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 12234 v0.AddArg(w) 12235 v.AddArg(v0) 12236 v.AddArg(mem) 12237 return true 12238 } 12239 // match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 12240 // cond: x.Uses == 1 && clobber(x) 12241 // result: (MOVHstoreidx ptr idx w mem) 12242 for { 12243 _ = v.Args[3] 12244 ptr := v.Args[0] 12245 idx := v.Args[1] 12246 w := v.Args[2] 12247 x := v.Args[3] 12248 if x.Op != OpARM64MOVBstoreidx { 12249 break 12250 } 12251 _ = x.Args[3] 12252 if ptr != x.Args[0] { 12253 break 12254 } 12255 x_1 := x.Args[1] 12256 if x_1.Op != OpARM64ADDconst { 12257 break 12258 } 12259 if x_1.AuxInt != 1 { 12260 
break 12261 } 12262 if idx != x_1.Args[0] { 12263 break 12264 } 12265 x_2 := x.Args[2] 12266 if x_2.Op != OpARM64UBFX { 12267 break 12268 } 12269 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 12270 break 12271 } 12272 if w != x_2.Args[0] { 12273 break 12274 } 12275 mem := x.Args[3] 12276 if !(x.Uses == 1 && clobber(x)) { 12277 break 12278 } 12279 v.reset(OpARM64MOVHstoreidx) 12280 v.AddArg(ptr) 12281 v.AddArg(idx) 12282 v.AddArg(w) 12283 v.AddArg(mem) 12284 return true 12285 } 12286 return false 12287 } 12288 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 12289 b := v.Block 12290 _ = b 12291 config := b.Func.Config 12292 _ = config 12293 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 12294 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12295 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 12296 for { 12297 off1 := v.AuxInt 12298 sym := v.Aux 12299 _ = v.Args[1] 12300 v_0 := v.Args[0] 12301 if v_0.Op != OpARM64ADDconst { 12302 break 12303 } 12304 off2 := v_0.AuxInt 12305 ptr := v_0.Args[0] 12306 mem := v.Args[1] 12307 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12308 break 12309 } 12310 v.reset(OpARM64MOVBstorezero) 12311 v.AuxInt = off1 + off2 12312 v.Aux = sym 12313 v.AddArg(ptr) 12314 v.AddArg(mem) 12315 return true 12316 } 12317 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12318 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12319 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12320 for { 12321 off1 := v.AuxInt 12322 sym1 := v.Aux 12323 _ = v.Args[1] 12324 v_0 := v.Args[0] 12325 if v_0.Op != OpARM64MOVDaddr { 12326 break 12327 } 12328 off2 := v_0.AuxInt 12329 sym2 := v_0.Aux 12330 ptr := v_0.Args[0] 12331 mem := v.Args[1] 12332 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12333 break 12334 } 12335 
v.reset(OpARM64MOVBstorezero) 12336 v.AuxInt = off1 + off2 12337 v.Aux = mergeSym(sym1, sym2) 12338 v.AddArg(ptr) 12339 v.AddArg(mem) 12340 return true 12341 } 12342 // match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem) 12343 // cond: off == 0 && sym == nil 12344 // result: (MOVBstorezeroidx ptr idx mem) 12345 for { 12346 off := v.AuxInt 12347 sym := v.Aux 12348 _ = v.Args[1] 12349 v_0 := v.Args[0] 12350 if v_0.Op != OpARM64ADD { 12351 break 12352 } 12353 _ = v_0.Args[1] 12354 ptr := v_0.Args[0] 12355 idx := v_0.Args[1] 12356 mem := v.Args[1] 12357 if !(off == 0 && sym == nil) { 12358 break 12359 } 12360 v.reset(OpARM64MOVBstorezeroidx) 12361 v.AddArg(ptr) 12362 v.AddArg(idx) 12363 v.AddArg(mem) 12364 return true 12365 } 12366 // match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem)) 12367 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,1) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 12368 // result: (MOVHstorezero [min(i,j)] {s} ptr0 mem) 12369 for { 12370 i := v.AuxInt 12371 s := v.Aux 12372 _ = v.Args[1] 12373 ptr0 := v.Args[0] 12374 x := v.Args[1] 12375 if x.Op != OpARM64MOVBstorezero { 12376 break 12377 } 12378 j := x.AuxInt 12379 if x.Aux != s { 12380 break 12381 } 12382 _ = x.Args[1] 12383 ptr1 := x.Args[0] 12384 mem := x.Args[1] 12385 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 12386 break 12387 } 12388 v.reset(OpARM64MOVHstorezero) 12389 v.AuxInt = min(i, j) 12390 v.Aux = s 12391 v.AddArg(ptr0) 12392 v.AddArg(mem) 12393 return true 12394 } 12395 // match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem)) 12396 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 12397 // result: (MOVHstorezeroidx ptr1 idx1 mem) 12398 for { 12399 if v.AuxInt != 1 { 12400 break 12401 } 12402 s := v.Aux 12403 _ = v.Args[1] 12404 v_0 := v.Args[0] 12405 if 
v_0.Op != OpARM64ADD { 12406 break 12407 } 12408 _ = v_0.Args[1] 12409 ptr0 := v_0.Args[0] 12410 idx0 := v_0.Args[1] 12411 x := v.Args[1] 12412 if x.Op != OpARM64MOVBstorezeroidx { 12413 break 12414 } 12415 _ = x.Args[2] 12416 ptr1 := x.Args[0] 12417 idx1 := x.Args[1] 12418 mem := x.Args[2] 12419 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 12420 break 12421 } 12422 v.reset(OpARM64MOVHstorezeroidx) 12423 v.AddArg(ptr1) 12424 v.AddArg(idx1) 12425 v.AddArg(mem) 12426 return true 12427 } 12428 return false 12429 } 12430 func rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v *Value) bool { 12431 // match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem) 12432 // cond: 12433 // result: (MOVBstorezero [c] ptr mem) 12434 for { 12435 _ = v.Args[2] 12436 ptr := v.Args[0] 12437 v_1 := v.Args[1] 12438 if v_1.Op != OpARM64MOVDconst { 12439 break 12440 } 12441 c := v_1.AuxInt 12442 mem := v.Args[2] 12443 v.reset(OpARM64MOVBstorezero) 12444 v.AuxInt = c 12445 v.AddArg(ptr) 12446 v.AddArg(mem) 12447 return true 12448 } 12449 // match: (MOVBstorezeroidx (MOVDconst [c]) idx mem) 12450 // cond: 12451 // result: (MOVBstorezero [c] idx mem) 12452 for { 12453 _ = v.Args[2] 12454 v_0 := v.Args[0] 12455 if v_0.Op != OpARM64MOVDconst { 12456 break 12457 } 12458 c := v_0.AuxInt 12459 idx := v.Args[1] 12460 mem := v.Args[2] 12461 v.reset(OpARM64MOVBstorezero) 12462 v.AuxInt = c 12463 v.AddArg(idx) 12464 v.AddArg(mem) 12465 return true 12466 } 12467 // match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem)) 12468 // cond: x.Uses == 1 && clobber(x) 12469 // result: (MOVHstorezeroidx ptr idx mem) 12470 for { 12471 _ = v.Args[2] 12472 ptr := v.Args[0] 12473 v_1 := v.Args[1] 12474 if v_1.Op != OpARM64ADDconst { 12475 break 12476 } 12477 if v_1.AuxInt != 1 { 12478 break 12479 } 12480 idx := v_1.Args[0] 12481 x := v.Args[2] 12482 if x.Op != OpARM64MOVBstorezeroidx { 12483 break 
12484 } 12485 _ = x.Args[2] 12486 if ptr != x.Args[0] { 12487 break 12488 } 12489 if idx != x.Args[1] { 12490 break 12491 } 12492 mem := x.Args[2] 12493 if !(x.Uses == 1 && clobber(x)) { 12494 break 12495 } 12496 v.reset(OpARM64MOVHstorezeroidx) 12497 v.AddArg(ptr) 12498 v.AddArg(idx) 12499 v.AddArg(mem) 12500 return true 12501 } 12502 return false 12503 } 12504 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool { 12505 b := v.Block 12506 _ = b 12507 config := b.Func.Config 12508 _ = config 12509 // match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _)) 12510 // cond: 12511 // result: (FMOVDfpgp val) 12512 for { 12513 off := v.AuxInt 12514 sym := v.Aux 12515 _ = v.Args[1] 12516 ptr := v.Args[0] 12517 v_1 := v.Args[1] 12518 if v_1.Op != OpARM64FMOVDstore { 12519 break 12520 } 12521 if v_1.AuxInt != off { 12522 break 12523 } 12524 if v_1.Aux != sym { 12525 break 12526 } 12527 _ = v_1.Args[2] 12528 if ptr != v_1.Args[0] { 12529 break 12530 } 12531 val := v_1.Args[1] 12532 v.reset(OpARM64FMOVDfpgp) 12533 v.AddArg(val) 12534 return true 12535 } 12536 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 12537 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12538 // result: (MOVDload [off1+off2] {sym} ptr mem) 12539 for { 12540 off1 := v.AuxInt 12541 sym := v.Aux 12542 _ = v.Args[1] 12543 v_0 := v.Args[0] 12544 if v_0.Op != OpARM64ADDconst { 12545 break 12546 } 12547 off2 := v_0.AuxInt 12548 ptr := v_0.Args[0] 12549 mem := v.Args[1] 12550 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12551 break 12552 } 12553 v.reset(OpARM64MOVDload) 12554 v.AuxInt = off1 + off2 12555 v.Aux = sym 12556 v.AddArg(ptr) 12557 v.AddArg(mem) 12558 return true 12559 } 12560 // match: (MOVDload [off] {sym} (ADD ptr idx) mem) 12561 // cond: off == 0 && sym == nil 12562 // result: (MOVDloadidx ptr idx mem) 12563 for { 12564 off := v.AuxInt 12565 sym := v.Aux 12566 _ = v.Args[1] 12567 v_0 := v.Args[0] 12568 if v_0.Op 
!= OpARM64ADD { 12569 break 12570 } 12571 _ = v_0.Args[1] 12572 ptr := v_0.Args[0] 12573 idx := v_0.Args[1] 12574 mem := v.Args[1] 12575 if !(off == 0 && sym == nil) { 12576 break 12577 } 12578 v.reset(OpARM64MOVDloadidx) 12579 v.AddArg(ptr) 12580 v.AddArg(idx) 12581 v.AddArg(mem) 12582 return true 12583 } 12584 // match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem) 12585 // cond: off == 0 && sym == nil 12586 // result: (MOVDloadidx8 ptr idx mem) 12587 for { 12588 off := v.AuxInt 12589 sym := v.Aux 12590 _ = v.Args[1] 12591 v_0 := v.Args[0] 12592 if v_0.Op != OpARM64ADDshiftLL { 12593 break 12594 } 12595 if v_0.AuxInt != 3 { 12596 break 12597 } 12598 _ = v_0.Args[1] 12599 ptr := v_0.Args[0] 12600 idx := v_0.Args[1] 12601 mem := v.Args[1] 12602 if !(off == 0 && sym == nil) { 12603 break 12604 } 12605 v.reset(OpARM64MOVDloadidx8) 12606 v.AddArg(ptr) 12607 v.AddArg(idx) 12608 v.AddArg(mem) 12609 return true 12610 } 12611 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12612 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12613 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12614 for { 12615 off1 := v.AuxInt 12616 sym1 := v.Aux 12617 _ = v.Args[1] 12618 v_0 := v.Args[0] 12619 if v_0.Op != OpARM64MOVDaddr { 12620 break 12621 } 12622 off2 := v_0.AuxInt 12623 sym2 := v_0.Aux 12624 ptr := v_0.Args[0] 12625 mem := v.Args[1] 12626 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12627 break 12628 } 12629 v.reset(OpARM64MOVDload) 12630 v.AuxInt = off1 + off2 12631 v.Aux = mergeSym(sym1, sym2) 12632 v.AddArg(ptr) 12633 v.AddArg(mem) 12634 return true 12635 } 12636 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 12637 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 12638 // result: (MOVDconst [0]) 12639 for { 12640 off := v.AuxInt 12641 sym := v.Aux 12642 _ = v.Args[1] 12643 ptr := v.Args[0] 
		// (tail of rewriteValueARM64_OpARM64MOVDload_0)
		// A MOVDload that reads back a slot just zeroed by MOVDstorezero at the
		// same sym/offset/pointer is statically zero.
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MOVDload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read64(sym, off, config.BigEndian))])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSB {
			break
		}
		if !(symIsRO(sym)) {
			break
		}
		// Read-only symbol: fold the load to its constant contents at compile time.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(read64(sym, off, config.BigEndian))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDloadidx_0 applies generated rewrite rules to a
// MOVDloadidx value: a constant index folds into the load offset, a "<<3"
// index selects the scaled MOVDloadidx8 form, and a load of a just-zeroed
// indexed slot becomes MOVDconst [0]. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDloadidx_0(v *Value) bool {
	// match: (MOVDloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx ptr (SLLconst [3] idx) mem)
	// cond:
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		// ptr+idx is commutative, so accept the pair in either order.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDloadidx8_0 applies generated rewrite rules to
// the 8-byte-scaled indexed load MOVDloadidx8. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVDloadidx8_0(v *Value) bool {
	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDload [c<<3] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		// The index is scaled by 8, so the folded offset is c<<3.
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
12805 ptr := v.Args[0] 12806 idx := v.Args[1] 12807 v_2 := v.Args[2] 12808 if v_2.Op != OpARM64MOVDstorezeroidx8 { 12809 break 12810 } 12811 _ = v_2.Args[2] 12812 ptr2 := v_2.Args[0] 12813 idx2 := v_2.Args[1] 12814 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 12815 break 12816 } 12817 v.reset(OpARM64MOVDconst) 12818 v.AuxInt = 0 12819 return true 12820 } 12821 return false 12822 } 12823 func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool { 12824 // match: (MOVDreg x) 12825 // cond: x.Uses == 1 12826 // result: (MOVDnop x) 12827 for { 12828 x := v.Args[0] 12829 if !(x.Uses == 1) { 12830 break 12831 } 12832 v.reset(OpARM64MOVDnop) 12833 v.AddArg(x) 12834 return true 12835 } 12836 // match: (MOVDreg (MOVDconst [c])) 12837 // cond: 12838 // result: (MOVDconst [c]) 12839 for { 12840 v_0 := v.Args[0] 12841 if v_0.Op != OpARM64MOVDconst { 12842 break 12843 } 12844 c := v_0.AuxInt 12845 v.reset(OpARM64MOVDconst) 12846 v.AuxInt = c 12847 return true 12848 } 12849 return false 12850 } 12851 func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool { 12852 b := v.Block 12853 _ = b 12854 config := b.Func.Config 12855 _ = config 12856 // match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem) 12857 // cond: 12858 // result: (FMOVDstore [off] {sym} ptr val mem) 12859 for { 12860 off := v.AuxInt 12861 sym := v.Aux 12862 _ = v.Args[2] 12863 ptr := v.Args[0] 12864 v_1 := v.Args[1] 12865 if v_1.Op != OpARM64FMOVDfpgp { 12866 break 12867 } 12868 val := v_1.Args[0] 12869 mem := v.Args[2] 12870 v.reset(OpARM64FMOVDstore) 12871 v.AuxInt = off 12872 v.Aux = sym 12873 v.AddArg(ptr) 12874 v.AddArg(val) 12875 v.AddArg(mem) 12876 return true 12877 } 12878 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 12879 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12880 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 12881 for { 12882 off1 := v.AuxInt 12883 sym := v.Aux 12884 _ = v.Args[2] 12885 v_0 := v.Args[0] 12886 if v_0.Op != 
		if v_0.Op != OpARM64ADDconst {
			break
		}
		// Fold the ADDconst into the store's offset when it stays in 32 bits
		// (and the base is not SB under -shared).
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		// Storing zero uses the dedicated zero-store (ZR source) form.
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstoreidx_0 applies generated rewrite rules to
// an indexed MOVDstoreidx: a constant index folds into the plain store's
// offset, a "<<3" index selects the scaled MOVDstoreidx8 form, and storing
// constant zero becomes MOVDstorezeroidx. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstoreidx_0(v *Value) bool {
	// match: (MOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVDstore [c] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond:
	// result: (MOVDstore [c] idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c
v.AddArg(idx) 13048 v.AddArg(val) 13049 v.AddArg(mem) 13050 return true 13051 } 13052 // match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem) 13053 // cond: 13054 // result: (MOVDstoreidx8 ptr idx val mem) 13055 for { 13056 _ = v.Args[3] 13057 ptr := v.Args[0] 13058 v_1 := v.Args[1] 13059 if v_1.Op != OpARM64SLLconst { 13060 break 13061 } 13062 if v_1.AuxInt != 3 { 13063 break 13064 } 13065 idx := v_1.Args[0] 13066 val := v.Args[2] 13067 mem := v.Args[3] 13068 v.reset(OpARM64MOVDstoreidx8) 13069 v.AddArg(ptr) 13070 v.AddArg(idx) 13071 v.AddArg(val) 13072 v.AddArg(mem) 13073 return true 13074 } 13075 // match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem) 13076 // cond: 13077 // result: (MOVDstoreidx8 ptr idx val mem) 13078 for { 13079 _ = v.Args[3] 13080 v_0 := v.Args[0] 13081 if v_0.Op != OpARM64SLLconst { 13082 break 13083 } 13084 if v_0.AuxInt != 3 { 13085 break 13086 } 13087 idx := v_0.Args[0] 13088 ptr := v.Args[1] 13089 val := v.Args[2] 13090 mem := v.Args[3] 13091 v.reset(OpARM64MOVDstoreidx8) 13092 v.AddArg(ptr) 13093 v.AddArg(idx) 13094 v.AddArg(val) 13095 v.AddArg(mem) 13096 return true 13097 } 13098 // match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem) 13099 // cond: 13100 // result: (MOVDstorezeroidx ptr idx mem) 13101 for { 13102 _ = v.Args[3] 13103 ptr := v.Args[0] 13104 idx := v.Args[1] 13105 v_2 := v.Args[2] 13106 if v_2.Op != OpARM64MOVDconst { 13107 break 13108 } 13109 if v_2.AuxInt != 0 { 13110 break 13111 } 13112 mem := v.Args[3] 13113 v.reset(OpARM64MOVDstorezeroidx) 13114 v.AddArg(ptr) 13115 v.AddArg(idx) 13116 v.AddArg(mem) 13117 return true 13118 } 13119 return false 13120 } 13121 func rewriteValueARM64_OpARM64MOVDstoreidx8_0(v *Value) bool { 13122 // match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem) 13123 // cond: 13124 // result: (MOVDstore [c<<3] ptr val mem) 13125 for { 13126 _ = v.Args[3] 13127 ptr := v.Args[0] 13128 v_1 := v.Args[1] 13129 if v_1.Op != OpARM64MOVDconst { 13130 break 13131 } 13132 c := v_1.AuxInt 13133 val := v.Args[2] 
13134 mem := v.Args[3] 13135 v.reset(OpARM64MOVDstore) 13136 v.AuxInt = c << 3 13137 v.AddArg(ptr) 13138 v.AddArg(val) 13139 v.AddArg(mem) 13140 return true 13141 } 13142 // match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem) 13143 // cond: 13144 // result: (MOVDstorezeroidx8 ptr idx mem) 13145 for { 13146 _ = v.Args[3] 13147 ptr := v.Args[0] 13148 idx := v.Args[1] 13149 v_2 := v.Args[2] 13150 if v_2.Op != OpARM64MOVDconst { 13151 break 13152 } 13153 if v_2.AuxInt != 0 { 13154 break 13155 } 13156 mem := v.Args[3] 13157 v.reset(OpARM64MOVDstorezeroidx8) 13158 v.AddArg(ptr) 13159 v.AddArg(idx) 13160 v.AddArg(mem) 13161 return true 13162 } 13163 return false 13164 } 13165 func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool { 13166 b := v.Block 13167 _ = b 13168 config := b.Func.Config 13169 _ = config 13170 // match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 13171 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13172 // result: (MOVDstorezero [off1+off2] {sym} ptr mem) 13173 for { 13174 off1 := v.AuxInt 13175 sym := v.Aux 13176 _ = v.Args[1] 13177 v_0 := v.Args[0] 13178 if v_0.Op != OpARM64ADDconst { 13179 break 13180 } 13181 off2 := v_0.AuxInt 13182 ptr := v_0.Args[0] 13183 mem := v.Args[1] 13184 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13185 break 13186 } 13187 v.reset(OpARM64MOVDstorezero) 13188 v.AuxInt = off1 + off2 13189 v.Aux = sym 13190 v.AddArg(ptr) 13191 v.AddArg(mem) 13192 return true 13193 } 13194 // match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 13195 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13196 // result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 13197 for { 13198 off1 := v.AuxInt 13199 sym1 := v.Aux 13200 _ = v.Args[1] 13201 v_0 := v.Args[0] 13202 if v_0.Op != OpARM64MOVDaddr { 13203 break 13204 } 13205 off2 := v_0.AuxInt 13206 sym2 := v_0.Aux 13207 ptr := v_0.Args[0] 
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(i,j,8) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVQstorezero [min(i,j)] {s} ptr0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[1]
		ptr0 := v.Args[0]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezero {
			break
		}
		j := x.AuxInt
		if x.Aux != s {
			break
		}
		_ = x.Args[1]
		ptr1 := x.Args[0]
		mem := x.Args[1]
		// Two adjacent 8-byte zero stores to the same base collapse into one
		// 16-byte zero store at the lower offset; x must be otherwise unused.
		if !(x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = min(i, j)
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		s := v.Aux
		_ = v.Args[1]
		p0 := v.Args[0]
		if p0.Op != OpARM64ADD {
			break
		}
		_ = p0.Args[1]
		ptr0 := p0.Args[0]
		idx0 := p0.Args[1]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezeroidx {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		// ADD operands are commutative, so the indexed store's pair may match
		// in either order.
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = 0
		v.Aux = s
		v.AddArg(p0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		s := v.Aux
		_ = v.Args[1]
		p0 := v.Args[0]
		if p0.Op != OpARM64ADDshiftLL {
			break
		}
		if p0.AuxInt != 3 {
			break
		}
		_ = p0.Args[1]
		ptr0 := p0.Args[0]
		idx0 := p0.Args[1]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = 0
		v.Aux = s
		v.AddArg(p0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezeroidx_0 applies generated rewrite rules
// to an indexed zero store MOVDstorezeroidx: a constant index folds into the
// plain MOVDstorezero offset, and a "<<3" index selects the scaled
// MOVDstorezeroidx8 form. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v *Value) bool {
	// match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDstorezero [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
	// cond:
	// result: (MOVDstorezero [c] idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
13436 if v_0.Op != OpARM64SLLconst { 13437 break 13438 } 13439 if v_0.AuxInt != 3 { 13440 break 13441 } 13442 idx := v_0.Args[0] 13443 ptr := v.Args[1] 13444 mem := v.Args[2] 13445 v.reset(OpARM64MOVDstorezeroidx8) 13446 v.AddArg(ptr) 13447 v.AddArg(idx) 13448 v.AddArg(mem) 13449 return true 13450 } 13451 return false 13452 } 13453 func rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v *Value) bool { 13454 // match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem) 13455 // cond: 13456 // result: (MOVDstorezero [c<<3] ptr mem) 13457 for { 13458 _ = v.Args[2] 13459 ptr := v.Args[0] 13460 v_1 := v.Args[1] 13461 if v_1.Op != OpARM64MOVDconst { 13462 break 13463 } 13464 c := v_1.AuxInt 13465 mem := v.Args[2] 13466 v.reset(OpARM64MOVDstorezero) 13467 v.AuxInt = c << 3 13468 v.AddArg(ptr) 13469 v.AddArg(mem) 13470 return true 13471 } 13472 return false 13473 } 13474 func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool { 13475 b := v.Block 13476 _ = b 13477 config := b.Func.Config 13478 _ = config 13479 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 13480 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13481 // result: (MOVHUload [off1+off2] {sym} ptr mem) 13482 for { 13483 off1 := v.AuxInt 13484 sym := v.Aux 13485 _ = v.Args[1] 13486 v_0 := v.Args[0] 13487 if v_0.Op != OpARM64ADDconst { 13488 break 13489 } 13490 off2 := v_0.AuxInt 13491 ptr := v_0.Args[0] 13492 mem := v.Args[1] 13493 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13494 break 13495 } 13496 v.reset(OpARM64MOVHUload) 13497 v.AuxInt = off1 + off2 13498 v.Aux = sym 13499 v.AddArg(ptr) 13500 v.AddArg(mem) 13501 return true 13502 } 13503 // match: (MOVHUload [off] {sym} (ADD ptr idx) mem) 13504 // cond: off == 0 && sym == nil 13505 // result: (MOVHUloadidx ptr idx mem) 13506 for { 13507 off := v.AuxInt 13508 sym := v.Aux 13509 _ = v.Args[1] 13510 v_0 := v.Args[0] 13511 if v_0.Op != OpARM64ADD { 13512 break 13513 } 13514 _ = v_0.Args[1] 13515 ptr 
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		// Halfword loads scale the index by 2, hence the shift amount 1.
		if v_0.AuxInt != 1 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MOVHUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read16(sym, off, config.BigEndian))])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSB {
			break
		}
		if !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(read16(sym, off, config.BigEndian))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUloadidx_0 applies generated rewrite rules to
// an indexed MOVHUloadidx: constant indexes fold into the load offset, and a
// doubled index ("<<1" or idx+idx) selects the scaled MOVHUloadidx2 form. It
// reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUloadidx_0(v *Value) bool {
	// match: (MOVHUloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVHUload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVHUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVHUload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx ptr (SLLconst [1] idx) mem)
	// cond:
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx ptr (ADD idx idx) mem)
	// cond:
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64ADD {
			break
		}
		_ = v_1.Args[1]
		idx := v_1.Args[0]
		// idx+idx == idx*2, the scale of the loadidx2 form.
		if idx != v_1.Args[1] {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx (ADD idx idx) ptr mem)
	// cond:
	// result: (MOVHUloadidx2 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		idx := v_0.Args[0]
		if idx != v_0.Args[1] {
			break
		}
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHUloadidx2)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVHstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUloadidx2_0 applies generated rewrite rules to
// the 2-byte-scaled indexed load MOVHUloadidx2. It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64MOVHUloadidx2_0(v *Value) bool {
	// match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVHUload [c<<1] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVHUload)
		// The index is scaled by 2, so the folded offset is c<<1.
		v.AuxInt = c << 1
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVHstorezeroidx2 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUreg_0 applies generated rewrite rules to a
// MOVHUreg (zero-extend halfword): it is redundant after loads/extensions that
// already zero the upper bits, folds into ANDconst and MOVDconst, and combines
// with shifts into bitfield ops. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool {
	// match: (MOVHUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg x:(MOVHUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (ANDconst [c] x))
	// cond:
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		// Zero-extension of a masked value just tightens the mask to 16 bits.
		v.AuxInt = c & (1<<16 - 1)
		v.AddArg(x)
		return true
	}
	// match: (MOVHUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint16(c))
		return true
	}
	// match: (MOVHUreg (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<16-1, sc)
	// result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, sc)) {
			break
		}
		// shift-left + zero-extend == unsigned bitfield insert in zero.
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUreg_10 continues the MOVHUreg rule set
// (rules are split across functions in batches of ten by the generator). It
// reports whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHUreg_10(v *Value) bool {
	// match: (MOVHUreg (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<16-1, 0)
	// result: (UBFX [arm64BFAuxInt(sc, 16)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, 0)) {
			break
		}
		// shift-right + zero-extend == unsigned bitfield extract.
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, 16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHload_0 applies generated rewrite rules to a
// MOVHload (16-bit sign-extending load): address arithmetic folds into the
// addressing mode and a load of a just-zeroed halfword folds to 0. It reports
// whether v was rewritten.
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
// cond: off == 0 && sym == nil 14003 // result: (MOVHloadidx2 ptr idx mem) 14004 for { 14005 off := v.AuxInt 14006 sym := v.Aux 14007 _ = v.Args[1] 14008 v_0 := v.Args[0] 14009 if v_0.Op != OpARM64ADDshiftLL { 14010 break 14011 } 14012 if v_0.AuxInt != 1 { 14013 break 14014 } 14015 _ = v_0.Args[1] 14016 ptr := v_0.Args[0] 14017 idx := v_0.Args[1] 14018 mem := v.Args[1] 14019 if !(off == 0 && sym == nil) { 14020 break 14021 } 14022 v.reset(OpARM64MOVHloadidx2) 14023 v.AddArg(ptr) 14024 v.AddArg(idx) 14025 v.AddArg(mem) 14026 return true 14027 } 14028 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 14029 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14030 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 14031 for { 14032 off1 := v.AuxInt 14033 sym1 := v.Aux 14034 _ = v.Args[1] 14035 v_0 := v.Args[0] 14036 if v_0.Op != OpARM64MOVDaddr { 14037 break 14038 } 14039 off2 := v_0.AuxInt 14040 sym2 := v_0.Aux 14041 ptr := v_0.Args[0] 14042 mem := v.Args[1] 14043 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14044 break 14045 } 14046 v.reset(OpARM64MOVHload) 14047 v.AuxInt = off1 + off2 14048 v.Aux = mergeSym(sym1, sym2) 14049 v.AddArg(ptr) 14050 v.AddArg(mem) 14051 return true 14052 } 14053 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 14054 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 14055 // result: (MOVDconst [0]) 14056 for { 14057 off := v.AuxInt 14058 sym := v.Aux 14059 _ = v.Args[1] 14060 ptr := v.Args[0] 14061 v_1 := v.Args[1] 14062 if v_1.Op != OpARM64MOVHstorezero { 14063 break 14064 } 14065 off2 := v_1.AuxInt 14066 sym2 := v_1.Aux 14067 _ = v_1.Args[1] 14068 ptr2 := v_1.Args[0] 14069 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 14070 break 14071 } 14072 v.reset(OpARM64MOVDconst) 14073 v.AuxInt = 0 14074 return true 14075 } 14076 return false 14077 } 14078 func 
rewriteValueARM64_OpARM64MOVHloadidx_0(v *Value) bool { 14079 // match: (MOVHloadidx ptr (MOVDconst [c]) mem) 14080 // cond: 14081 // result: (MOVHload [c] ptr mem) 14082 for { 14083 _ = v.Args[2] 14084 ptr := v.Args[0] 14085 v_1 := v.Args[1] 14086 if v_1.Op != OpARM64MOVDconst { 14087 break 14088 } 14089 c := v_1.AuxInt 14090 mem := v.Args[2] 14091 v.reset(OpARM64MOVHload) 14092 v.AuxInt = c 14093 v.AddArg(ptr) 14094 v.AddArg(mem) 14095 return true 14096 } 14097 // match: (MOVHloadidx (MOVDconst [c]) ptr mem) 14098 // cond: 14099 // result: (MOVHload [c] ptr mem) 14100 for { 14101 _ = v.Args[2] 14102 v_0 := v.Args[0] 14103 if v_0.Op != OpARM64MOVDconst { 14104 break 14105 } 14106 c := v_0.AuxInt 14107 ptr := v.Args[1] 14108 mem := v.Args[2] 14109 v.reset(OpARM64MOVHload) 14110 v.AuxInt = c 14111 v.AddArg(ptr) 14112 v.AddArg(mem) 14113 return true 14114 } 14115 // match: (MOVHloadidx ptr (SLLconst [1] idx) mem) 14116 // cond: 14117 // result: (MOVHloadidx2 ptr idx mem) 14118 for { 14119 _ = v.Args[2] 14120 ptr := v.Args[0] 14121 v_1 := v.Args[1] 14122 if v_1.Op != OpARM64SLLconst { 14123 break 14124 } 14125 if v_1.AuxInt != 1 { 14126 break 14127 } 14128 idx := v_1.Args[0] 14129 mem := v.Args[2] 14130 v.reset(OpARM64MOVHloadidx2) 14131 v.AddArg(ptr) 14132 v.AddArg(idx) 14133 v.AddArg(mem) 14134 return true 14135 } 14136 // match: (MOVHloadidx ptr (ADD idx idx) mem) 14137 // cond: 14138 // result: (MOVHloadidx2 ptr idx mem) 14139 for { 14140 _ = v.Args[2] 14141 ptr := v.Args[0] 14142 v_1 := v.Args[1] 14143 if v_1.Op != OpARM64ADD { 14144 break 14145 } 14146 _ = v_1.Args[1] 14147 idx := v_1.Args[0] 14148 if idx != v_1.Args[1] { 14149 break 14150 } 14151 mem := v.Args[2] 14152 v.reset(OpARM64MOVHloadidx2) 14153 v.AddArg(ptr) 14154 v.AddArg(idx) 14155 v.AddArg(mem) 14156 return true 14157 } 14158 // match: (MOVHloadidx (ADD idx idx) ptr mem) 14159 // cond: 14160 // result: (MOVHloadidx2 ptr idx mem) 14161 for { 14162 _ = v.Args[2] 14163 v_0 := v.Args[0] 14164 if v_0.Op 
!= OpARM64ADD { 14165 break 14166 } 14167 _ = v_0.Args[1] 14168 idx := v_0.Args[0] 14169 if idx != v_0.Args[1] { 14170 break 14171 } 14172 ptr := v.Args[1] 14173 mem := v.Args[2] 14174 v.reset(OpARM64MOVHloadidx2) 14175 v.AddArg(ptr) 14176 v.AddArg(idx) 14177 v.AddArg(mem) 14178 return true 14179 } 14180 // match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _)) 14181 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 14182 // result: (MOVDconst [0]) 14183 for { 14184 _ = v.Args[2] 14185 ptr := v.Args[0] 14186 idx := v.Args[1] 14187 v_2 := v.Args[2] 14188 if v_2.Op != OpARM64MOVHstorezeroidx { 14189 break 14190 } 14191 _ = v_2.Args[2] 14192 ptr2 := v_2.Args[0] 14193 idx2 := v_2.Args[1] 14194 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 14195 break 14196 } 14197 v.reset(OpARM64MOVDconst) 14198 v.AuxInt = 0 14199 return true 14200 } 14201 return false 14202 } 14203 func rewriteValueARM64_OpARM64MOVHloadidx2_0(v *Value) bool { 14204 // match: (MOVHloadidx2 ptr (MOVDconst [c]) mem) 14205 // cond: 14206 // result: (MOVHload [c<<1] ptr mem) 14207 for { 14208 _ = v.Args[2] 14209 ptr := v.Args[0] 14210 v_1 := v.Args[1] 14211 if v_1.Op != OpARM64MOVDconst { 14212 break 14213 } 14214 c := v_1.AuxInt 14215 mem := v.Args[2] 14216 v.reset(OpARM64MOVHload) 14217 v.AuxInt = c << 1 14218 v.AddArg(ptr) 14219 v.AddArg(mem) 14220 return true 14221 } 14222 // match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _)) 14223 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 14224 // result: (MOVDconst [0]) 14225 for { 14226 _ = v.Args[2] 14227 ptr := v.Args[0] 14228 idx := v.Args[1] 14229 v_2 := v.Args[2] 14230 if v_2.Op != OpARM64MOVHstorezeroidx2 { 14231 break 14232 } 14233 _ = v_2.Args[2] 14234 ptr2 := v_2.Args[0] 14235 idx2 := v_2.Args[1] 14236 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 14237 break 14238 } 14239 v.reset(OpARM64MOVDconst) 14240 v.AuxInt = 
0 14241 return true 14242 } 14243 return false 14244 } 14245 func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool { 14246 // match: (MOVHreg x:(MOVBload _ _)) 14247 // cond: 14248 // result: (MOVDreg x) 14249 for { 14250 x := v.Args[0] 14251 if x.Op != OpARM64MOVBload { 14252 break 14253 } 14254 _ = x.Args[1] 14255 v.reset(OpARM64MOVDreg) 14256 v.AddArg(x) 14257 return true 14258 } 14259 // match: (MOVHreg x:(MOVBUload _ _)) 14260 // cond: 14261 // result: (MOVDreg x) 14262 for { 14263 x := v.Args[0] 14264 if x.Op != OpARM64MOVBUload { 14265 break 14266 } 14267 _ = x.Args[1] 14268 v.reset(OpARM64MOVDreg) 14269 v.AddArg(x) 14270 return true 14271 } 14272 // match: (MOVHreg x:(MOVHload _ _)) 14273 // cond: 14274 // result: (MOVDreg x) 14275 for { 14276 x := v.Args[0] 14277 if x.Op != OpARM64MOVHload { 14278 break 14279 } 14280 _ = x.Args[1] 14281 v.reset(OpARM64MOVDreg) 14282 v.AddArg(x) 14283 return true 14284 } 14285 // match: (MOVHreg x:(MOVBloadidx _ _ _)) 14286 // cond: 14287 // result: (MOVDreg x) 14288 for { 14289 x := v.Args[0] 14290 if x.Op != OpARM64MOVBloadidx { 14291 break 14292 } 14293 _ = x.Args[2] 14294 v.reset(OpARM64MOVDreg) 14295 v.AddArg(x) 14296 return true 14297 } 14298 // match: (MOVHreg x:(MOVBUloadidx _ _ _)) 14299 // cond: 14300 // result: (MOVDreg x) 14301 for { 14302 x := v.Args[0] 14303 if x.Op != OpARM64MOVBUloadidx { 14304 break 14305 } 14306 _ = x.Args[2] 14307 v.reset(OpARM64MOVDreg) 14308 v.AddArg(x) 14309 return true 14310 } 14311 // match: (MOVHreg x:(MOVHloadidx _ _ _)) 14312 // cond: 14313 // result: (MOVDreg x) 14314 for { 14315 x := v.Args[0] 14316 if x.Op != OpARM64MOVHloadidx { 14317 break 14318 } 14319 _ = x.Args[2] 14320 v.reset(OpARM64MOVDreg) 14321 v.AddArg(x) 14322 return true 14323 } 14324 // match: (MOVHreg x:(MOVHloadidx2 _ _ _)) 14325 // cond: 14326 // result: (MOVDreg x) 14327 for { 14328 x := v.Args[0] 14329 if x.Op != OpARM64MOVHloadidx2 { 14330 break 14331 } 14332 _ = x.Args[2] 14333 v.reset(OpARM64MOVDreg) 
14334 v.AddArg(x) 14335 return true 14336 } 14337 // match: (MOVHreg x:(MOVBreg _)) 14338 // cond: 14339 // result: (MOVDreg x) 14340 for { 14341 x := v.Args[0] 14342 if x.Op != OpARM64MOVBreg { 14343 break 14344 } 14345 v.reset(OpARM64MOVDreg) 14346 v.AddArg(x) 14347 return true 14348 } 14349 // match: (MOVHreg x:(MOVBUreg _)) 14350 // cond: 14351 // result: (MOVDreg x) 14352 for { 14353 x := v.Args[0] 14354 if x.Op != OpARM64MOVBUreg { 14355 break 14356 } 14357 v.reset(OpARM64MOVDreg) 14358 v.AddArg(x) 14359 return true 14360 } 14361 // match: (MOVHreg x:(MOVHreg _)) 14362 // cond: 14363 // result: (MOVDreg x) 14364 for { 14365 x := v.Args[0] 14366 if x.Op != OpARM64MOVHreg { 14367 break 14368 } 14369 v.reset(OpARM64MOVDreg) 14370 v.AddArg(x) 14371 return true 14372 } 14373 return false 14374 } 14375 func rewriteValueARM64_OpARM64MOVHreg_10(v *Value) bool { 14376 // match: (MOVHreg (MOVDconst [c])) 14377 // cond: 14378 // result: (MOVDconst [int64(int16(c))]) 14379 for { 14380 v_0 := v.Args[0] 14381 if v_0.Op != OpARM64MOVDconst { 14382 break 14383 } 14384 c := v_0.AuxInt 14385 v.reset(OpARM64MOVDconst) 14386 v.AuxInt = int64(int16(c)) 14387 return true 14388 } 14389 // match: (MOVHreg (SLLconst [lc] x)) 14390 // cond: lc < 16 14391 // result: (SBFIZ [arm64BFAuxInt(lc, 16-lc)] x) 14392 for { 14393 v_0 := v.Args[0] 14394 if v_0.Op != OpARM64SLLconst { 14395 break 14396 } 14397 lc := v_0.AuxInt 14398 x := v_0.Args[0] 14399 if !(lc < 16) { 14400 break 14401 } 14402 v.reset(OpARM64SBFIZ) 14403 v.AuxInt = arm64BFAuxInt(lc, 16-lc) 14404 v.AddArg(x) 14405 return true 14406 } 14407 return false 14408 } 14409 func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool { 14410 b := v.Block 14411 _ = b 14412 config := b.Func.Config 14413 _ = config 14414 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem) 14415 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14416 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 14417 for { 14418 
off1 := v.AuxInt 14419 sym := v.Aux 14420 _ = v.Args[2] 14421 v_0 := v.Args[0] 14422 if v_0.Op != OpARM64ADDconst { 14423 break 14424 } 14425 off2 := v_0.AuxInt 14426 ptr := v_0.Args[0] 14427 val := v.Args[1] 14428 mem := v.Args[2] 14429 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14430 break 14431 } 14432 v.reset(OpARM64MOVHstore) 14433 v.AuxInt = off1 + off2 14434 v.Aux = sym 14435 v.AddArg(ptr) 14436 v.AddArg(val) 14437 v.AddArg(mem) 14438 return true 14439 } 14440 // match: (MOVHstore [off] {sym} (ADD ptr idx) val mem) 14441 // cond: off == 0 && sym == nil 14442 // result: (MOVHstoreidx ptr idx val mem) 14443 for { 14444 off := v.AuxInt 14445 sym := v.Aux 14446 _ = v.Args[2] 14447 v_0 := v.Args[0] 14448 if v_0.Op != OpARM64ADD { 14449 break 14450 } 14451 _ = v_0.Args[1] 14452 ptr := v_0.Args[0] 14453 idx := v_0.Args[1] 14454 val := v.Args[1] 14455 mem := v.Args[2] 14456 if !(off == 0 && sym == nil) { 14457 break 14458 } 14459 v.reset(OpARM64MOVHstoreidx) 14460 v.AddArg(ptr) 14461 v.AddArg(idx) 14462 v.AddArg(val) 14463 v.AddArg(mem) 14464 return true 14465 } 14466 // match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem) 14467 // cond: off == 0 && sym == nil 14468 // result: (MOVHstoreidx2 ptr idx val mem) 14469 for { 14470 off := v.AuxInt 14471 sym := v.Aux 14472 _ = v.Args[2] 14473 v_0 := v.Args[0] 14474 if v_0.Op != OpARM64ADDshiftLL { 14475 break 14476 } 14477 if v_0.AuxInt != 1 { 14478 break 14479 } 14480 _ = v_0.Args[1] 14481 ptr := v_0.Args[0] 14482 idx := v_0.Args[1] 14483 val := v.Args[1] 14484 mem := v.Args[2] 14485 if !(off == 0 && sym == nil) { 14486 break 14487 } 14488 v.reset(OpARM64MOVHstoreidx2) 14489 v.AddArg(ptr) 14490 v.AddArg(idx) 14491 v.AddArg(val) 14492 v.AddArg(mem) 14493 return true 14494 } 14495 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 14496 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14497 // result: 
(MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 14498 for { 14499 off1 := v.AuxInt 14500 sym1 := v.Aux 14501 _ = v.Args[2] 14502 v_0 := v.Args[0] 14503 if v_0.Op != OpARM64MOVDaddr { 14504 break 14505 } 14506 off2 := v_0.AuxInt 14507 sym2 := v_0.Aux 14508 ptr := v_0.Args[0] 14509 val := v.Args[1] 14510 mem := v.Args[2] 14511 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14512 break 14513 } 14514 v.reset(OpARM64MOVHstore) 14515 v.AuxInt = off1 + off2 14516 v.Aux = mergeSym(sym1, sym2) 14517 v.AddArg(ptr) 14518 v.AddArg(val) 14519 v.AddArg(mem) 14520 return true 14521 } 14522 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 14523 // cond: 14524 // result: (MOVHstorezero [off] {sym} ptr mem) 14525 for { 14526 off := v.AuxInt 14527 sym := v.Aux 14528 _ = v.Args[2] 14529 ptr := v.Args[0] 14530 v_1 := v.Args[1] 14531 if v_1.Op != OpARM64MOVDconst { 14532 break 14533 } 14534 if v_1.AuxInt != 0 { 14535 break 14536 } 14537 mem := v.Args[2] 14538 v.reset(OpARM64MOVHstorezero) 14539 v.AuxInt = off 14540 v.Aux = sym 14541 v.AddArg(ptr) 14542 v.AddArg(mem) 14543 return true 14544 } 14545 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 14546 // cond: 14547 // result: (MOVHstore [off] {sym} ptr x mem) 14548 for { 14549 off := v.AuxInt 14550 sym := v.Aux 14551 _ = v.Args[2] 14552 ptr := v.Args[0] 14553 v_1 := v.Args[1] 14554 if v_1.Op != OpARM64MOVHreg { 14555 break 14556 } 14557 x := v_1.Args[0] 14558 mem := v.Args[2] 14559 v.reset(OpARM64MOVHstore) 14560 v.AuxInt = off 14561 v.Aux = sym 14562 v.AddArg(ptr) 14563 v.AddArg(x) 14564 v.AddArg(mem) 14565 return true 14566 } 14567 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 14568 // cond: 14569 // result: (MOVHstore [off] {sym} ptr x mem) 14570 for { 14571 off := v.AuxInt 14572 sym := v.Aux 14573 _ = v.Args[2] 14574 ptr := v.Args[0] 14575 v_1 := v.Args[1] 14576 if v_1.Op != OpARM64MOVHUreg { 14577 break 14578 } 14579 x := v_1.Args[0] 14580 mem 
:= v.Args[2] 14581 v.reset(OpARM64MOVHstore) 14582 v.AuxInt = off 14583 v.Aux = sym 14584 v.AddArg(ptr) 14585 v.AddArg(x) 14586 v.AddArg(mem) 14587 return true 14588 } 14589 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 14590 // cond: 14591 // result: (MOVHstore [off] {sym} ptr x mem) 14592 for { 14593 off := v.AuxInt 14594 sym := v.Aux 14595 _ = v.Args[2] 14596 ptr := v.Args[0] 14597 v_1 := v.Args[1] 14598 if v_1.Op != OpARM64MOVWreg { 14599 break 14600 } 14601 x := v_1.Args[0] 14602 mem := v.Args[2] 14603 v.reset(OpARM64MOVHstore) 14604 v.AuxInt = off 14605 v.Aux = sym 14606 v.AddArg(ptr) 14607 v.AddArg(x) 14608 v.AddArg(mem) 14609 return true 14610 } 14611 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 14612 // cond: 14613 // result: (MOVHstore [off] {sym} ptr x mem) 14614 for { 14615 off := v.AuxInt 14616 sym := v.Aux 14617 _ = v.Args[2] 14618 ptr := v.Args[0] 14619 v_1 := v.Args[1] 14620 if v_1.Op != OpARM64MOVWUreg { 14621 break 14622 } 14623 x := v_1.Args[0] 14624 mem := v.Args[2] 14625 v.reset(OpARM64MOVHstore) 14626 v.AuxInt = off 14627 v.Aux = sym 14628 v.AddArg(ptr) 14629 v.AddArg(x) 14630 v.AddArg(mem) 14631 return true 14632 } 14633 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14634 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14635 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14636 for { 14637 i := v.AuxInt 14638 s := v.Aux 14639 _ = v.Args[2] 14640 ptr0 := v.Args[0] 14641 v_1 := v.Args[1] 14642 if v_1.Op != OpARM64SRLconst { 14643 break 14644 } 14645 if v_1.AuxInt != 16 { 14646 break 14647 } 14648 w := v_1.Args[0] 14649 x := v.Args[2] 14650 if x.Op != OpARM64MOVHstore { 14651 break 14652 } 14653 if x.AuxInt != i-2 { 14654 break 14655 } 14656 if x.Aux != s { 14657 break 14658 } 14659 _ = x.Args[2] 14660 ptr1 := x.Args[0] 14661 if w != x.Args[1] { 14662 break 14663 } 14664 mem := x.Args[2] 14665 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14666 break 14667 } 14668 
v.reset(OpARM64MOVWstore) 14669 v.AuxInt = i - 2 14670 v.Aux = s 14671 v.AddArg(ptr0) 14672 v.AddArg(w) 14673 v.AddArg(mem) 14674 return true 14675 } 14676 return false 14677 } 14678 func rewriteValueARM64_OpARM64MOVHstore_10(v *Value) bool { 14679 b := v.Block 14680 _ = b 14681 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 14682 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14683 // result: (MOVWstoreidx ptr1 idx1 w mem) 14684 for { 14685 if v.AuxInt != 2 { 14686 break 14687 } 14688 s := v.Aux 14689 _ = v.Args[2] 14690 v_0 := v.Args[0] 14691 if v_0.Op != OpARM64ADD { 14692 break 14693 } 14694 _ = v_0.Args[1] 14695 ptr0 := v_0.Args[0] 14696 idx0 := v_0.Args[1] 14697 v_1 := v.Args[1] 14698 if v_1.Op != OpARM64SRLconst { 14699 break 14700 } 14701 if v_1.AuxInt != 16 { 14702 break 14703 } 14704 w := v_1.Args[0] 14705 x := v.Args[2] 14706 if x.Op != OpARM64MOVHstoreidx { 14707 break 14708 } 14709 _ = x.Args[3] 14710 ptr1 := x.Args[0] 14711 idx1 := x.Args[1] 14712 if w != x.Args[2] { 14713 break 14714 } 14715 mem := x.Args[3] 14716 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 14717 break 14718 } 14719 v.reset(OpARM64MOVWstoreidx) 14720 v.AddArg(ptr1) 14721 v.AddArg(idx1) 14722 v.AddArg(w) 14723 v.AddArg(mem) 14724 return true 14725 } 14726 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 14727 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 14728 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 14729 for { 14730 if v.AuxInt != 2 { 14731 break 14732 } 14733 s := v.Aux 14734 _ = v.Args[2] 14735 v_0 := v.Args[0] 14736 if v_0.Op != OpARM64ADDshiftLL { 14737 break 14738 } 14739 if v_0.AuxInt != 1 
{ 14740 break 14741 } 14742 _ = v_0.Args[1] 14743 ptr0 := v_0.Args[0] 14744 idx0 := v_0.Args[1] 14745 v_1 := v.Args[1] 14746 if v_1.Op != OpARM64SRLconst { 14747 break 14748 } 14749 if v_1.AuxInt != 16 { 14750 break 14751 } 14752 w := v_1.Args[0] 14753 x := v.Args[2] 14754 if x.Op != OpARM64MOVHstoreidx2 { 14755 break 14756 } 14757 _ = x.Args[3] 14758 ptr1 := x.Args[0] 14759 idx1 := x.Args[1] 14760 if w != x.Args[2] { 14761 break 14762 } 14763 mem := x.Args[3] 14764 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 14765 break 14766 } 14767 v.reset(OpARM64MOVWstoreidx) 14768 v.AddArg(ptr1) 14769 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 14770 v0.AuxInt = 1 14771 v0.AddArg(idx1) 14772 v.AddArg(v0) 14773 v.AddArg(w) 14774 v.AddArg(mem) 14775 return true 14776 } 14777 // match: (MOVHstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14778 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14779 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14780 for { 14781 i := v.AuxInt 14782 s := v.Aux 14783 _ = v.Args[2] 14784 ptr0 := v.Args[0] 14785 v_1 := v.Args[1] 14786 if v_1.Op != OpARM64UBFX { 14787 break 14788 } 14789 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14790 break 14791 } 14792 w := v_1.Args[0] 14793 x := v.Args[2] 14794 if x.Op != OpARM64MOVHstore { 14795 break 14796 } 14797 if x.AuxInt != i-2 { 14798 break 14799 } 14800 if x.Aux != s { 14801 break 14802 } 14803 _ = x.Args[2] 14804 ptr1 := x.Args[0] 14805 if w != x.Args[1] { 14806 break 14807 } 14808 mem := x.Args[2] 14809 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14810 break 14811 } 14812 v.reset(OpARM64MOVWstore) 14813 v.AuxInt = i - 2 14814 v.Aux = s 14815 v.AddArg(ptr0) 14816 v.AddArg(w) 14817 v.AddArg(mem) 14818 return true 14819 } 14820 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 14821 // cond: x.Uses == 1 && s == nil && 
(isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14822 // result: (MOVWstoreidx ptr1 idx1 w mem) 14823 for { 14824 if v.AuxInt != 2 { 14825 break 14826 } 14827 s := v.Aux 14828 _ = v.Args[2] 14829 v_0 := v.Args[0] 14830 if v_0.Op != OpARM64ADD { 14831 break 14832 } 14833 _ = v_0.Args[1] 14834 ptr0 := v_0.Args[0] 14835 idx0 := v_0.Args[1] 14836 v_1 := v.Args[1] 14837 if v_1.Op != OpARM64UBFX { 14838 break 14839 } 14840 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14841 break 14842 } 14843 w := v_1.Args[0] 14844 x := v.Args[2] 14845 if x.Op != OpARM64MOVHstoreidx { 14846 break 14847 } 14848 _ = x.Args[3] 14849 ptr1 := x.Args[0] 14850 idx1 := x.Args[1] 14851 if w != x.Args[2] { 14852 break 14853 } 14854 mem := x.Args[3] 14855 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 14856 break 14857 } 14858 v.reset(OpARM64MOVWstoreidx) 14859 v.AddArg(ptr1) 14860 v.AddArg(idx1) 14861 v.AddArg(w) 14862 v.AddArg(mem) 14863 return true 14864 } 14865 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 14866 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 14867 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 14868 for { 14869 if v.AuxInt != 2 { 14870 break 14871 } 14872 s := v.Aux 14873 _ = v.Args[2] 14874 v_0 := v.Args[0] 14875 if v_0.Op != OpARM64ADDshiftLL { 14876 break 14877 } 14878 if v_0.AuxInt != 1 { 14879 break 14880 } 14881 _ = v_0.Args[1] 14882 ptr0 := v_0.Args[0] 14883 idx0 := v_0.Args[1] 14884 v_1 := v.Args[1] 14885 if v_1.Op != OpARM64UBFX { 14886 break 14887 } 14888 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14889 break 14890 } 14891 w := v_1.Args[0] 14892 x := v.Args[2] 14893 if x.Op != OpARM64MOVHstoreidx2 { 14894 break 14895 } 14896 _ = x.Args[3] 14897 ptr1 := x.Args[0] 
14898 idx1 := x.Args[1] 14899 if w != x.Args[2] { 14900 break 14901 } 14902 mem := x.Args[3] 14903 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 14904 break 14905 } 14906 v.reset(OpARM64MOVWstoreidx) 14907 v.AddArg(ptr1) 14908 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 14909 v0.AuxInt = 1 14910 v0.AddArg(idx1) 14911 v.AddArg(v0) 14912 v.AddArg(w) 14913 v.AddArg(mem) 14914 return true 14915 } 14916 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] (MOVDreg w)) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14917 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14918 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14919 for { 14920 i := v.AuxInt 14921 s := v.Aux 14922 _ = v.Args[2] 14923 ptr0 := v.Args[0] 14924 v_1 := v.Args[1] 14925 if v_1.Op != OpARM64SRLconst { 14926 break 14927 } 14928 if v_1.AuxInt != 16 { 14929 break 14930 } 14931 v_1_0 := v_1.Args[0] 14932 if v_1_0.Op != OpARM64MOVDreg { 14933 break 14934 } 14935 w := v_1_0.Args[0] 14936 x := v.Args[2] 14937 if x.Op != OpARM64MOVHstore { 14938 break 14939 } 14940 if x.AuxInt != i-2 { 14941 break 14942 } 14943 if x.Aux != s { 14944 break 14945 } 14946 _ = x.Args[2] 14947 ptr1 := x.Args[0] 14948 if w != x.Args[1] { 14949 break 14950 } 14951 mem := x.Args[2] 14952 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14953 break 14954 } 14955 v.reset(OpARM64MOVWstore) 14956 v.AuxInt = i - 2 14957 v.Aux = s 14958 v.AddArg(ptr0) 14959 v.AddArg(w) 14960 v.AddArg(mem) 14961 return true 14962 } 14963 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx ptr1 idx1 w mem)) 14964 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14965 // result: (MOVWstoreidx ptr1 idx1 w mem) 14966 for { 14967 if v.AuxInt != 2 { 14968 break 14969 } 14970 s := v.Aux 14971 _ = v.Args[2] 14972 v_0 := v.Args[0] 14973 if v_0.Op != OpARM64ADD { 14974 
break 14975 } 14976 _ = v_0.Args[1] 14977 ptr0 := v_0.Args[0] 14978 idx0 := v_0.Args[1] 14979 v_1 := v.Args[1] 14980 if v_1.Op != OpARM64SRLconst { 14981 break 14982 } 14983 if v_1.AuxInt != 16 { 14984 break 14985 } 14986 v_1_0 := v_1.Args[0] 14987 if v_1_0.Op != OpARM64MOVDreg { 14988 break 14989 } 14990 w := v_1_0.Args[0] 14991 x := v.Args[2] 14992 if x.Op != OpARM64MOVHstoreidx { 14993 break 14994 } 14995 _ = x.Args[3] 14996 ptr1 := x.Args[0] 14997 idx1 := x.Args[1] 14998 if w != x.Args[2] { 14999 break 15000 } 15001 mem := x.Args[3] 15002 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15003 break 15004 } 15005 v.reset(OpARM64MOVWstoreidx) 15006 v.AddArg(ptr1) 15007 v.AddArg(idx1) 15008 v.AddArg(w) 15009 v.AddArg(mem) 15010 return true 15011 } 15012 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 15013 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15014 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 15015 for { 15016 if v.AuxInt != 2 { 15017 break 15018 } 15019 s := v.Aux 15020 _ = v.Args[2] 15021 v_0 := v.Args[0] 15022 if v_0.Op != OpARM64ADDshiftLL { 15023 break 15024 } 15025 if v_0.AuxInt != 1 { 15026 break 15027 } 15028 _ = v_0.Args[1] 15029 ptr0 := v_0.Args[0] 15030 idx0 := v_0.Args[1] 15031 v_1 := v.Args[1] 15032 if v_1.Op != OpARM64SRLconst { 15033 break 15034 } 15035 if v_1.AuxInt != 16 { 15036 break 15037 } 15038 v_1_0 := v_1.Args[0] 15039 if v_1_0.Op != OpARM64MOVDreg { 15040 break 15041 } 15042 w := v_1_0.Args[0] 15043 x := v.Args[2] 15044 if x.Op != OpARM64MOVHstoreidx2 { 15045 break 15046 } 15047 _ = x.Args[3] 15048 ptr1 := x.Args[0] 15049 idx1 := x.Args[1] 15050 if w != x.Args[2] { 15051 break 15052 } 15053 mem := x.Args[3] 15054 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) 
&& clobber(x)) { 15055 break 15056 } 15057 v.reset(OpARM64MOVWstoreidx) 15058 v.AddArg(ptr1) 15059 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15060 v0.AuxInt = 1 15061 v0.AddArg(idx1) 15062 v.AddArg(v0) 15063 v.AddArg(w) 15064 v.AddArg(mem) 15065 return true 15066 } 15067 // match: (MOVHstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVHstore [i-2] {s} ptr1 w0:(SRLconst [j-16] w) mem)) 15068 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 15069 // result: (MOVWstore [i-2] {s} ptr0 w0 mem) 15070 for { 15071 i := v.AuxInt 15072 s := v.Aux 15073 _ = v.Args[2] 15074 ptr0 := v.Args[0] 15075 v_1 := v.Args[1] 15076 if v_1.Op != OpARM64SRLconst { 15077 break 15078 } 15079 j := v_1.AuxInt 15080 w := v_1.Args[0] 15081 x := v.Args[2] 15082 if x.Op != OpARM64MOVHstore { 15083 break 15084 } 15085 if x.AuxInt != i-2 { 15086 break 15087 } 15088 if x.Aux != s { 15089 break 15090 } 15091 _ = x.Args[2] 15092 ptr1 := x.Args[0] 15093 w0 := x.Args[1] 15094 if w0.Op != OpARM64SRLconst { 15095 break 15096 } 15097 if w0.AuxInt != j-16 { 15098 break 15099 } 15100 if w != w0.Args[0] { 15101 break 15102 } 15103 mem := x.Args[2] 15104 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 15105 break 15106 } 15107 v.reset(OpARM64MOVWstore) 15108 v.AuxInt = i - 2 15109 v.Aux = s 15110 v.AddArg(ptr0) 15111 v.AddArg(w0) 15112 v.AddArg(mem) 15113 return true 15114 } 15115 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 15116 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15117 // result: (MOVWstoreidx ptr1 idx1 w0 mem) 15118 for { 15119 if v.AuxInt != 2 { 15120 break 15121 } 15122 s := v.Aux 15123 _ = v.Args[2] 15124 v_0 := v.Args[0] 15125 if v_0.Op != OpARM64ADD { 15126 break 15127 } 15128 _ = v_0.Args[1] 15129 ptr0 := v_0.Args[0] 15130 idx0 := v_0.Args[1] 15131 v_1 := v.Args[1] 15132 if v_1.Op != 
OpARM64SRLconst { 15133 break 15134 } 15135 j := v_1.AuxInt 15136 w := v_1.Args[0] 15137 x := v.Args[2] 15138 if x.Op != OpARM64MOVHstoreidx { 15139 break 15140 } 15141 _ = x.Args[3] 15142 ptr1 := x.Args[0] 15143 idx1 := x.Args[1] 15144 w0 := x.Args[2] 15145 if w0.Op != OpARM64SRLconst { 15146 break 15147 } 15148 if w0.AuxInt != j-16 { 15149 break 15150 } 15151 if w != w0.Args[0] { 15152 break 15153 } 15154 mem := x.Args[3] 15155 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15156 break 15157 } 15158 v.reset(OpARM64MOVWstoreidx) 15159 v.AddArg(ptr1) 15160 v.AddArg(idx1) 15161 v.AddArg(w0) 15162 v.AddArg(mem) 15163 return true 15164 } 15165 return false 15166 } 15167 func rewriteValueARM64_OpARM64MOVHstore_20(v *Value) bool { 15168 b := v.Block 15169 _ = b 15170 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 15171 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15172 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem) 15173 for { 15174 if v.AuxInt != 2 { 15175 break 15176 } 15177 s := v.Aux 15178 _ = v.Args[2] 15179 v_0 := v.Args[0] 15180 if v_0.Op != OpARM64ADDshiftLL { 15181 break 15182 } 15183 if v_0.AuxInt != 1 { 15184 break 15185 } 15186 _ = v_0.Args[1] 15187 ptr0 := v_0.Args[0] 15188 idx0 := v_0.Args[1] 15189 v_1 := v.Args[1] 15190 if v_1.Op != OpARM64SRLconst { 15191 break 15192 } 15193 j := v_1.AuxInt 15194 w := v_1.Args[0] 15195 x := v.Args[2] 15196 if x.Op != OpARM64MOVHstoreidx2 { 15197 break 15198 } 15199 _ = x.Args[3] 15200 ptr1 := x.Args[0] 15201 idx1 := x.Args[1] 15202 w0 := x.Args[2] 15203 if w0.Op != OpARM64SRLconst { 15204 break 15205 } 15206 if w0.AuxInt != j-16 { 15207 break 15208 } 15209 if w != w0.Args[0] { 15210 break 15211 } 15212 mem := x.Args[3] 15213 if !(x.Uses == 1 && s == nil && 
isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15214 break 15215 } 15216 v.reset(OpARM64MOVWstoreidx) 15217 v.AddArg(ptr1) 15218 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15219 v0.AuxInt = 1 15220 v0.AddArg(idx1) 15221 v.AddArg(v0) 15222 v.AddArg(w0) 15223 v.AddArg(mem) 15224 return true 15225 } 15226 return false 15227 } 15228 func rewriteValueARM64_OpARM64MOVHstoreidx_0(v *Value) bool { 15229 // match: (MOVHstoreidx ptr (MOVDconst [c]) val mem) 15230 // cond: 15231 // result: (MOVHstore [c] ptr val mem) 15232 for { 15233 _ = v.Args[3] 15234 ptr := v.Args[0] 15235 v_1 := v.Args[1] 15236 if v_1.Op != OpARM64MOVDconst { 15237 break 15238 } 15239 c := v_1.AuxInt 15240 val := v.Args[2] 15241 mem := v.Args[3] 15242 v.reset(OpARM64MOVHstore) 15243 v.AuxInt = c 15244 v.AddArg(ptr) 15245 v.AddArg(val) 15246 v.AddArg(mem) 15247 return true 15248 } 15249 // match: (MOVHstoreidx (MOVDconst [c]) idx val mem) 15250 // cond: 15251 // result: (MOVHstore [c] idx val mem) 15252 for { 15253 _ = v.Args[3] 15254 v_0 := v.Args[0] 15255 if v_0.Op != OpARM64MOVDconst { 15256 break 15257 } 15258 c := v_0.AuxInt 15259 idx := v.Args[1] 15260 val := v.Args[2] 15261 mem := v.Args[3] 15262 v.reset(OpARM64MOVHstore) 15263 v.AuxInt = c 15264 v.AddArg(idx) 15265 v.AddArg(val) 15266 v.AddArg(mem) 15267 return true 15268 } 15269 // match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem) 15270 // cond: 15271 // result: (MOVHstoreidx2 ptr idx val mem) 15272 for { 15273 _ = v.Args[3] 15274 ptr := v.Args[0] 15275 v_1 := v.Args[1] 15276 if v_1.Op != OpARM64SLLconst { 15277 break 15278 } 15279 if v_1.AuxInt != 1 { 15280 break 15281 } 15282 idx := v_1.Args[0] 15283 val := v.Args[2] 15284 mem := v.Args[3] 15285 v.reset(OpARM64MOVHstoreidx2) 15286 v.AddArg(ptr) 15287 v.AddArg(idx) 15288 v.AddArg(val) 15289 v.AddArg(mem) 15290 return true 15291 } 15292 // match: (MOVHstoreidx ptr (ADD idx idx) val mem) 15293 // cond: 15294 // result: (MOVHstoreidx2 ptr idx val mem) 15295 for { 15296 
_ = v.Args[3] 15297 ptr := v.Args[0] 15298 v_1 := v.Args[1] 15299 if v_1.Op != OpARM64ADD { 15300 break 15301 } 15302 _ = v_1.Args[1] 15303 idx := v_1.Args[0] 15304 if idx != v_1.Args[1] { 15305 break 15306 } 15307 val := v.Args[2] 15308 mem := v.Args[3] 15309 v.reset(OpARM64MOVHstoreidx2) 15310 v.AddArg(ptr) 15311 v.AddArg(idx) 15312 v.AddArg(val) 15313 v.AddArg(mem) 15314 return true 15315 } 15316 // match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem) 15317 // cond: 15318 // result: (MOVHstoreidx2 ptr idx val mem) 15319 for { 15320 _ = v.Args[3] 15321 v_0 := v.Args[0] 15322 if v_0.Op != OpARM64SLLconst { 15323 break 15324 } 15325 if v_0.AuxInt != 1 { 15326 break 15327 } 15328 idx := v_0.Args[0] 15329 ptr := v.Args[1] 15330 val := v.Args[2] 15331 mem := v.Args[3] 15332 v.reset(OpARM64MOVHstoreidx2) 15333 v.AddArg(ptr) 15334 v.AddArg(idx) 15335 v.AddArg(val) 15336 v.AddArg(mem) 15337 return true 15338 } 15339 // match: (MOVHstoreidx (ADD idx idx) ptr val mem) 15340 // cond: 15341 // result: (MOVHstoreidx2 ptr idx val mem) 15342 for { 15343 _ = v.Args[3] 15344 v_0 := v.Args[0] 15345 if v_0.Op != OpARM64ADD { 15346 break 15347 } 15348 _ = v_0.Args[1] 15349 idx := v_0.Args[0] 15350 if idx != v_0.Args[1] { 15351 break 15352 } 15353 ptr := v.Args[1] 15354 val := v.Args[2] 15355 mem := v.Args[3] 15356 v.reset(OpARM64MOVHstoreidx2) 15357 v.AddArg(ptr) 15358 v.AddArg(idx) 15359 v.AddArg(val) 15360 v.AddArg(mem) 15361 return true 15362 } 15363 // match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem) 15364 // cond: 15365 // result: (MOVHstorezeroidx ptr idx mem) 15366 for { 15367 _ = v.Args[3] 15368 ptr := v.Args[0] 15369 idx := v.Args[1] 15370 v_2 := v.Args[2] 15371 if v_2.Op != OpARM64MOVDconst { 15372 break 15373 } 15374 if v_2.AuxInt != 0 { 15375 break 15376 } 15377 mem := v.Args[3] 15378 v.reset(OpARM64MOVHstorezeroidx) 15379 v.AddArg(ptr) 15380 v.AddArg(idx) 15381 v.AddArg(mem) 15382 return true 15383 } 15384 // match: (MOVHstoreidx ptr idx (MOVHreg x) mem) 15385 // cond: 
15386 // result: (MOVHstoreidx ptr idx x mem) 15387 for { 15388 _ = v.Args[3] 15389 ptr := v.Args[0] 15390 idx := v.Args[1] 15391 v_2 := v.Args[2] 15392 if v_2.Op != OpARM64MOVHreg { 15393 break 15394 } 15395 x := v_2.Args[0] 15396 mem := v.Args[3] 15397 v.reset(OpARM64MOVHstoreidx) 15398 v.AddArg(ptr) 15399 v.AddArg(idx) 15400 v.AddArg(x) 15401 v.AddArg(mem) 15402 return true 15403 } 15404 // match: (MOVHstoreidx ptr idx (MOVHUreg x) mem) 15405 // cond: 15406 // result: (MOVHstoreidx ptr idx x mem) 15407 for { 15408 _ = v.Args[3] 15409 ptr := v.Args[0] 15410 idx := v.Args[1] 15411 v_2 := v.Args[2] 15412 if v_2.Op != OpARM64MOVHUreg { 15413 break 15414 } 15415 x := v_2.Args[0] 15416 mem := v.Args[3] 15417 v.reset(OpARM64MOVHstoreidx) 15418 v.AddArg(ptr) 15419 v.AddArg(idx) 15420 v.AddArg(x) 15421 v.AddArg(mem) 15422 return true 15423 } 15424 // match: (MOVHstoreidx ptr idx (MOVWreg x) mem) 15425 // cond: 15426 // result: (MOVHstoreidx ptr idx x mem) 15427 for { 15428 _ = v.Args[3] 15429 ptr := v.Args[0] 15430 idx := v.Args[1] 15431 v_2 := v.Args[2] 15432 if v_2.Op != OpARM64MOVWreg { 15433 break 15434 } 15435 x := v_2.Args[0] 15436 mem := v.Args[3] 15437 v.reset(OpARM64MOVHstoreidx) 15438 v.AddArg(ptr) 15439 v.AddArg(idx) 15440 v.AddArg(x) 15441 v.AddArg(mem) 15442 return true 15443 } 15444 return false 15445 } 15446 func rewriteValueARM64_OpARM64MOVHstoreidx_10(v *Value) bool { 15447 // match: (MOVHstoreidx ptr idx (MOVWUreg x) mem) 15448 // cond: 15449 // result: (MOVHstoreidx ptr idx x mem) 15450 for { 15451 _ = v.Args[3] 15452 ptr := v.Args[0] 15453 idx := v.Args[1] 15454 v_2 := v.Args[2] 15455 if v_2.Op != OpARM64MOVWUreg { 15456 break 15457 } 15458 x := v_2.Args[0] 15459 mem := v.Args[3] 15460 v.reset(OpARM64MOVHstoreidx) 15461 v.AddArg(ptr) 15462 v.AddArg(idx) 15463 v.AddArg(x) 15464 v.AddArg(mem) 15465 return true 15466 } 15467 // match: (MOVHstoreidx ptr (ADDconst [2] idx) (SRLconst [16] w) x:(MOVHstoreidx ptr idx w mem)) 15468 // cond: x.Uses == 1 && 
clobber(x) 15469 // result: (MOVWstoreidx ptr idx w mem) 15470 for { 15471 _ = v.Args[3] 15472 ptr := v.Args[0] 15473 v_1 := v.Args[1] 15474 if v_1.Op != OpARM64ADDconst { 15475 break 15476 } 15477 if v_1.AuxInt != 2 { 15478 break 15479 } 15480 idx := v_1.Args[0] 15481 v_2 := v.Args[2] 15482 if v_2.Op != OpARM64SRLconst { 15483 break 15484 } 15485 if v_2.AuxInt != 16 { 15486 break 15487 } 15488 w := v_2.Args[0] 15489 x := v.Args[3] 15490 if x.Op != OpARM64MOVHstoreidx { 15491 break 15492 } 15493 _ = x.Args[3] 15494 if ptr != x.Args[0] { 15495 break 15496 } 15497 if idx != x.Args[1] { 15498 break 15499 } 15500 if w != x.Args[2] { 15501 break 15502 } 15503 mem := x.Args[3] 15504 if !(x.Uses == 1 && clobber(x)) { 15505 break 15506 } 15507 v.reset(OpARM64MOVWstoreidx) 15508 v.AddArg(ptr) 15509 v.AddArg(idx) 15510 v.AddArg(w) 15511 v.AddArg(mem) 15512 return true 15513 } 15514 return false 15515 } 15516 func rewriteValueARM64_OpARM64MOVHstoreidx2_0(v *Value) bool { 15517 // match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem) 15518 // cond: 15519 // result: (MOVHstore [c<<1] ptr val mem) 15520 for { 15521 _ = v.Args[3] 15522 ptr := v.Args[0] 15523 v_1 := v.Args[1] 15524 if v_1.Op != OpARM64MOVDconst { 15525 break 15526 } 15527 c := v_1.AuxInt 15528 val := v.Args[2] 15529 mem := v.Args[3] 15530 v.reset(OpARM64MOVHstore) 15531 v.AuxInt = c << 1 15532 v.AddArg(ptr) 15533 v.AddArg(val) 15534 v.AddArg(mem) 15535 return true 15536 } 15537 // match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem) 15538 // cond: 15539 // result: (MOVHstorezeroidx2 ptr idx mem) 15540 for { 15541 _ = v.Args[3] 15542 ptr := v.Args[0] 15543 idx := v.Args[1] 15544 v_2 := v.Args[2] 15545 if v_2.Op != OpARM64MOVDconst { 15546 break 15547 } 15548 if v_2.AuxInt != 0 { 15549 break 15550 } 15551 mem := v.Args[3] 15552 v.reset(OpARM64MOVHstorezeroidx2) 15553 v.AddArg(ptr) 15554 v.AddArg(idx) 15555 v.AddArg(mem) 15556 return true 15557 } 15558 // match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem) 15559 // cond: 15560 
// result: (MOVHstoreidx2 ptr idx x mem) 15561 for { 15562 _ = v.Args[3] 15563 ptr := v.Args[0] 15564 idx := v.Args[1] 15565 v_2 := v.Args[2] 15566 if v_2.Op != OpARM64MOVHreg { 15567 break 15568 } 15569 x := v_2.Args[0] 15570 mem := v.Args[3] 15571 v.reset(OpARM64MOVHstoreidx2) 15572 v.AddArg(ptr) 15573 v.AddArg(idx) 15574 v.AddArg(x) 15575 v.AddArg(mem) 15576 return true 15577 } 15578 // match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem) 15579 // cond: 15580 // result: (MOVHstoreidx2 ptr idx x mem) 15581 for { 15582 _ = v.Args[3] 15583 ptr := v.Args[0] 15584 idx := v.Args[1] 15585 v_2 := v.Args[2] 15586 if v_2.Op != OpARM64MOVHUreg { 15587 break 15588 } 15589 x := v_2.Args[0] 15590 mem := v.Args[3] 15591 v.reset(OpARM64MOVHstoreidx2) 15592 v.AddArg(ptr) 15593 v.AddArg(idx) 15594 v.AddArg(x) 15595 v.AddArg(mem) 15596 return true 15597 } 15598 // match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem) 15599 // cond: 15600 // result: (MOVHstoreidx2 ptr idx x mem) 15601 for { 15602 _ = v.Args[3] 15603 ptr := v.Args[0] 15604 idx := v.Args[1] 15605 v_2 := v.Args[2] 15606 if v_2.Op != OpARM64MOVWreg { 15607 break 15608 } 15609 x := v_2.Args[0] 15610 mem := v.Args[3] 15611 v.reset(OpARM64MOVHstoreidx2) 15612 v.AddArg(ptr) 15613 v.AddArg(idx) 15614 v.AddArg(x) 15615 v.AddArg(mem) 15616 return true 15617 } 15618 // match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem) 15619 // cond: 15620 // result: (MOVHstoreidx2 ptr idx x mem) 15621 for { 15622 _ = v.Args[3] 15623 ptr := v.Args[0] 15624 idx := v.Args[1] 15625 v_2 := v.Args[2] 15626 if v_2.Op != OpARM64MOVWUreg { 15627 break 15628 } 15629 x := v_2.Args[0] 15630 mem := v.Args[3] 15631 v.reset(OpARM64MOVHstoreidx2) 15632 v.AddArg(ptr) 15633 v.AddArg(idx) 15634 v.AddArg(x) 15635 v.AddArg(mem) 15636 return true 15637 } 15638 return false 15639 } 15640 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 15641 b := v.Block 15642 _ = b 15643 config := b.Func.Config 15644 _ = config 15645 // match: (MOVHstorezero [off1] {sym} (ADDconst 
[off2] ptr) mem) 15646 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15647 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 15648 for { 15649 off1 := v.AuxInt 15650 sym := v.Aux 15651 _ = v.Args[1] 15652 v_0 := v.Args[0] 15653 if v_0.Op != OpARM64ADDconst { 15654 break 15655 } 15656 off2 := v_0.AuxInt 15657 ptr := v_0.Args[0] 15658 mem := v.Args[1] 15659 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15660 break 15661 } 15662 v.reset(OpARM64MOVHstorezero) 15663 v.AuxInt = off1 + off2 15664 v.Aux = sym 15665 v.AddArg(ptr) 15666 v.AddArg(mem) 15667 return true 15668 } 15669 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 15670 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15671 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 15672 for { 15673 off1 := v.AuxInt 15674 sym1 := v.Aux 15675 _ = v.Args[1] 15676 v_0 := v.Args[0] 15677 if v_0.Op != OpARM64MOVDaddr { 15678 break 15679 } 15680 off2 := v_0.AuxInt 15681 sym2 := v_0.Aux 15682 ptr := v_0.Args[0] 15683 mem := v.Args[1] 15684 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15685 break 15686 } 15687 v.reset(OpARM64MOVHstorezero) 15688 v.AuxInt = off1 + off2 15689 v.Aux = mergeSym(sym1, sym2) 15690 v.AddArg(ptr) 15691 v.AddArg(mem) 15692 return true 15693 } 15694 // match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem) 15695 // cond: off == 0 && sym == nil 15696 // result: (MOVHstorezeroidx ptr idx mem) 15697 for { 15698 off := v.AuxInt 15699 sym := v.Aux 15700 _ = v.Args[1] 15701 v_0 := v.Args[0] 15702 if v_0.Op != OpARM64ADD { 15703 break 15704 } 15705 _ = v_0.Args[1] 15706 ptr := v_0.Args[0] 15707 idx := v_0.Args[1] 15708 mem := v.Args[1] 15709 if !(off == 0 && sym == nil) { 15710 break 15711 } 15712 v.reset(OpARM64MOVHstorezeroidx) 15713 v.AddArg(ptr) 15714 v.AddArg(idx) 15715 v.AddArg(mem) 15716 
return true 15717 } 15718 // match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem) 15719 // cond: off == 0 && sym == nil 15720 // result: (MOVHstorezeroidx2 ptr idx mem) 15721 for { 15722 off := v.AuxInt 15723 sym := v.Aux 15724 _ = v.Args[1] 15725 v_0 := v.Args[0] 15726 if v_0.Op != OpARM64ADDshiftLL { 15727 break 15728 } 15729 if v_0.AuxInt != 1 { 15730 break 15731 } 15732 _ = v_0.Args[1] 15733 ptr := v_0.Args[0] 15734 idx := v_0.Args[1] 15735 mem := v.Args[1] 15736 if !(off == 0 && sym == nil) { 15737 break 15738 } 15739 v.reset(OpARM64MOVHstorezeroidx2) 15740 v.AddArg(ptr) 15741 v.AddArg(idx) 15742 v.AddArg(mem) 15743 return true 15744 } 15745 // match: (MOVHstorezero [i] {s} ptr0 x:(MOVHstorezero [j] {s} ptr1 mem)) 15746 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,2) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 15747 // result: (MOVWstorezero [min(i,j)] {s} ptr0 mem) 15748 for { 15749 i := v.AuxInt 15750 s := v.Aux 15751 _ = v.Args[1] 15752 ptr0 := v.Args[0] 15753 x := v.Args[1] 15754 if x.Op != OpARM64MOVHstorezero { 15755 break 15756 } 15757 j := x.AuxInt 15758 if x.Aux != s { 15759 break 15760 } 15761 _ = x.Args[1] 15762 ptr1 := x.Args[0] 15763 mem := x.Args[1] 15764 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 2) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 15765 break 15766 } 15767 v.reset(OpARM64MOVWstorezero) 15768 v.AuxInt = min(i, j) 15769 v.Aux = s 15770 v.AddArg(ptr0) 15771 v.AddArg(mem) 15772 return true 15773 } 15774 // match: (MOVHstorezero [2] {s} (ADD ptr0 idx0) x:(MOVHstorezeroidx ptr1 idx1 mem)) 15775 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15776 // result: (MOVWstorezeroidx ptr1 idx1 mem) 15777 for { 15778 if v.AuxInt != 2 { 15779 break 15780 } 15781 s := v.Aux 15782 _ = v.Args[1] 15783 v_0 := v.Args[0] 15784 if v_0.Op != OpARM64ADD { 15785 break 15786 } 15787 _ = v_0.Args[1] 15788 ptr0 
:= v_0.Args[0] 15789 idx0 := v_0.Args[1] 15790 x := v.Args[1] 15791 if x.Op != OpARM64MOVHstorezeroidx { 15792 break 15793 } 15794 _ = x.Args[2] 15795 ptr1 := x.Args[0] 15796 idx1 := x.Args[1] 15797 mem := x.Args[2] 15798 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15799 break 15800 } 15801 v.reset(OpARM64MOVWstorezeroidx) 15802 v.AddArg(ptr1) 15803 v.AddArg(idx1) 15804 v.AddArg(mem) 15805 return true 15806 } 15807 // match: (MOVHstorezero [2] {s} (ADDshiftLL [1] ptr0 idx0) x:(MOVHstorezeroidx2 ptr1 idx1 mem)) 15808 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15809 // result: (MOVWstorezeroidx ptr1 (SLLconst <idx1.Type> [1] idx1) mem) 15810 for { 15811 if v.AuxInt != 2 { 15812 break 15813 } 15814 s := v.Aux 15815 _ = v.Args[1] 15816 v_0 := v.Args[0] 15817 if v_0.Op != OpARM64ADDshiftLL { 15818 break 15819 } 15820 if v_0.AuxInt != 1 { 15821 break 15822 } 15823 _ = v_0.Args[1] 15824 ptr0 := v_0.Args[0] 15825 idx0 := v_0.Args[1] 15826 x := v.Args[1] 15827 if x.Op != OpARM64MOVHstorezeroidx2 { 15828 break 15829 } 15830 _ = x.Args[2] 15831 ptr1 := x.Args[0] 15832 idx1 := x.Args[1] 15833 mem := x.Args[2] 15834 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15835 break 15836 } 15837 v.reset(OpARM64MOVWstorezeroidx) 15838 v.AddArg(ptr1) 15839 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15840 v0.AuxInt = 1 15841 v0.AddArg(idx1) 15842 v.AddArg(v0) 15843 v.AddArg(mem) 15844 return true 15845 } 15846 return false 15847 } 15848 func rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v *Value) bool { 15849 // match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem) 15850 // cond: 15851 // result: (MOVHstorezero [c] ptr mem) 15852 for { 15853 _ = v.Args[2] 15854 ptr := v.Args[0] 15855 v_1 := v.Args[1] 15856 if v_1.Op != OpARM64MOVDconst { 15857 break 15858 } 15859 c := v_1.AuxInt 
15860 mem := v.Args[2] 15861 v.reset(OpARM64MOVHstorezero) 15862 v.AuxInt = c 15863 v.AddArg(ptr) 15864 v.AddArg(mem) 15865 return true 15866 } 15867 // match: (MOVHstorezeroidx (MOVDconst [c]) idx mem) 15868 // cond: 15869 // result: (MOVHstorezero [c] idx mem) 15870 for { 15871 _ = v.Args[2] 15872 v_0 := v.Args[0] 15873 if v_0.Op != OpARM64MOVDconst { 15874 break 15875 } 15876 c := v_0.AuxInt 15877 idx := v.Args[1] 15878 mem := v.Args[2] 15879 v.reset(OpARM64MOVHstorezero) 15880 v.AuxInt = c 15881 v.AddArg(idx) 15882 v.AddArg(mem) 15883 return true 15884 } 15885 // match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem) 15886 // cond: 15887 // result: (MOVHstorezeroidx2 ptr idx mem) 15888 for { 15889 _ = v.Args[2] 15890 ptr := v.Args[0] 15891 v_1 := v.Args[1] 15892 if v_1.Op != OpARM64SLLconst { 15893 break 15894 } 15895 if v_1.AuxInt != 1 { 15896 break 15897 } 15898 idx := v_1.Args[0] 15899 mem := v.Args[2] 15900 v.reset(OpARM64MOVHstorezeroidx2) 15901 v.AddArg(ptr) 15902 v.AddArg(idx) 15903 v.AddArg(mem) 15904 return true 15905 } 15906 // match: (MOVHstorezeroidx ptr (ADD idx idx) mem) 15907 // cond: 15908 // result: (MOVHstorezeroidx2 ptr idx mem) 15909 for { 15910 _ = v.Args[2] 15911 ptr := v.Args[0] 15912 v_1 := v.Args[1] 15913 if v_1.Op != OpARM64ADD { 15914 break 15915 } 15916 _ = v_1.Args[1] 15917 idx := v_1.Args[0] 15918 if idx != v_1.Args[1] { 15919 break 15920 } 15921 mem := v.Args[2] 15922 v.reset(OpARM64MOVHstorezeroidx2) 15923 v.AddArg(ptr) 15924 v.AddArg(idx) 15925 v.AddArg(mem) 15926 return true 15927 } 15928 // match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem) 15929 // cond: 15930 // result: (MOVHstorezeroidx2 ptr idx mem) 15931 for { 15932 _ = v.Args[2] 15933 v_0 := v.Args[0] 15934 if v_0.Op != OpARM64SLLconst { 15935 break 15936 } 15937 if v_0.AuxInt != 1 { 15938 break 15939 } 15940 idx := v_0.Args[0] 15941 ptr := v.Args[1] 15942 mem := v.Args[2] 15943 v.reset(OpARM64MOVHstorezeroidx2) 15944 v.AddArg(ptr) 15945 v.AddArg(idx) 15946 v.AddArg(mem) 
15947 return true 15948 } 15949 // match: (MOVHstorezeroidx (ADD idx idx) ptr mem) 15950 // cond: 15951 // result: (MOVHstorezeroidx2 ptr idx mem) 15952 for { 15953 _ = v.Args[2] 15954 v_0 := v.Args[0] 15955 if v_0.Op != OpARM64ADD { 15956 break 15957 } 15958 _ = v_0.Args[1] 15959 idx := v_0.Args[0] 15960 if idx != v_0.Args[1] { 15961 break 15962 } 15963 ptr := v.Args[1] 15964 mem := v.Args[2] 15965 v.reset(OpARM64MOVHstorezeroidx2) 15966 v.AddArg(ptr) 15967 v.AddArg(idx) 15968 v.AddArg(mem) 15969 return true 15970 } 15971 // match: (MOVHstorezeroidx ptr (ADDconst [2] idx) x:(MOVHstorezeroidx ptr idx mem)) 15972 // cond: x.Uses == 1 && clobber(x) 15973 // result: (MOVWstorezeroidx ptr idx mem) 15974 for { 15975 _ = v.Args[2] 15976 ptr := v.Args[0] 15977 v_1 := v.Args[1] 15978 if v_1.Op != OpARM64ADDconst { 15979 break 15980 } 15981 if v_1.AuxInt != 2 { 15982 break 15983 } 15984 idx := v_1.Args[0] 15985 x := v.Args[2] 15986 if x.Op != OpARM64MOVHstorezeroidx { 15987 break 15988 } 15989 _ = x.Args[2] 15990 if ptr != x.Args[0] { 15991 break 15992 } 15993 if idx != x.Args[1] { 15994 break 15995 } 15996 mem := x.Args[2] 15997 if !(x.Uses == 1 && clobber(x)) { 15998 break 15999 } 16000 v.reset(OpARM64MOVWstorezeroidx) 16001 v.AddArg(ptr) 16002 v.AddArg(idx) 16003 v.AddArg(mem) 16004 return true 16005 } 16006 return false 16007 } 16008 func rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v *Value) bool { 16009 // match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem) 16010 // cond: 16011 // result: (MOVHstorezero [c<<1] ptr mem) 16012 for { 16013 _ = v.Args[2] 16014 ptr := v.Args[0] 16015 v_1 := v.Args[1] 16016 if v_1.Op != OpARM64MOVDconst { 16017 break 16018 } 16019 c := v_1.AuxInt 16020 mem := v.Args[2] 16021 v.reset(OpARM64MOVHstorezero) 16022 v.AuxInt = c << 1 16023 v.AddArg(ptr) 16024 v.AddArg(mem) 16025 return true 16026 } 16027 return false 16028 } 16029 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool { 16030 b := v.Block 16031 _ = b 16032 config := 
b.Func.Config 16033 _ = config 16034 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 16035 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16036 // result: (MOVQstorezero [off1+off2] {sym} ptr mem) 16037 for { 16038 off1 := v.AuxInt 16039 sym := v.Aux 16040 _ = v.Args[1] 16041 v_0 := v.Args[0] 16042 if v_0.Op != OpARM64ADDconst { 16043 break 16044 } 16045 off2 := v_0.AuxInt 16046 ptr := v_0.Args[0] 16047 mem := v.Args[1] 16048 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16049 break 16050 } 16051 v.reset(OpARM64MOVQstorezero) 16052 v.AuxInt = off1 + off2 16053 v.Aux = sym 16054 v.AddArg(ptr) 16055 v.AddArg(mem) 16056 return true 16057 } 16058 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 16059 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16060 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 16061 for { 16062 off1 := v.AuxInt 16063 sym1 := v.Aux 16064 _ = v.Args[1] 16065 v_0 := v.Args[0] 16066 if v_0.Op != OpARM64MOVDaddr { 16067 break 16068 } 16069 off2 := v_0.AuxInt 16070 sym2 := v_0.Aux 16071 ptr := v_0.Args[0] 16072 mem := v.Args[1] 16073 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16074 break 16075 } 16076 v.reset(OpARM64MOVQstorezero) 16077 v.AuxInt = off1 + off2 16078 v.Aux = mergeSym(sym1, sym2) 16079 v.AddArg(ptr) 16080 v.AddArg(mem) 16081 return true 16082 } 16083 return false 16084 } 16085 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 16086 b := v.Block 16087 _ = b 16088 config := b.Func.Config 16089 _ = config 16090 // match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _)) 16091 // cond: 16092 // result: (FMOVSfpgp val) 16093 for { 16094 off := v.AuxInt 16095 sym := v.Aux 16096 _ = v.Args[1] 16097 ptr := v.Args[0] 16098 v_1 := v.Args[1] 16099 if v_1.Op != OpARM64FMOVSstore { 16100 break 16101 
} 16102 if v_1.AuxInt != off { 16103 break 16104 } 16105 if v_1.Aux != sym { 16106 break 16107 } 16108 _ = v_1.Args[2] 16109 if ptr != v_1.Args[0] { 16110 break 16111 } 16112 val := v_1.Args[1] 16113 v.reset(OpARM64FMOVSfpgp) 16114 v.AddArg(val) 16115 return true 16116 } 16117 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 16118 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16119 // result: (MOVWUload [off1+off2] {sym} ptr mem) 16120 for { 16121 off1 := v.AuxInt 16122 sym := v.Aux 16123 _ = v.Args[1] 16124 v_0 := v.Args[0] 16125 if v_0.Op != OpARM64ADDconst { 16126 break 16127 } 16128 off2 := v_0.AuxInt 16129 ptr := v_0.Args[0] 16130 mem := v.Args[1] 16131 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16132 break 16133 } 16134 v.reset(OpARM64MOVWUload) 16135 v.AuxInt = off1 + off2 16136 v.Aux = sym 16137 v.AddArg(ptr) 16138 v.AddArg(mem) 16139 return true 16140 } 16141 // match: (MOVWUload [off] {sym} (ADD ptr idx) mem) 16142 // cond: off == 0 && sym == nil 16143 // result: (MOVWUloadidx ptr idx mem) 16144 for { 16145 off := v.AuxInt 16146 sym := v.Aux 16147 _ = v.Args[1] 16148 v_0 := v.Args[0] 16149 if v_0.Op != OpARM64ADD { 16150 break 16151 } 16152 _ = v_0.Args[1] 16153 ptr := v_0.Args[0] 16154 idx := v_0.Args[1] 16155 mem := v.Args[1] 16156 if !(off == 0 && sym == nil) { 16157 break 16158 } 16159 v.reset(OpARM64MOVWUloadidx) 16160 v.AddArg(ptr) 16161 v.AddArg(idx) 16162 v.AddArg(mem) 16163 return true 16164 } 16165 // match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 16166 // cond: off == 0 && sym == nil 16167 // result: (MOVWUloadidx4 ptr idx mem) 16168 for { 16169 off := v.AuxInt 16170 sym := v.Aux 16171 _ = v.Args[1] 16172 v_0 := v.Args[0] 16173 if v_0.Op != OpARM64ADDshiftLL { 16174 break 16175 } 16176 if v_0.AuxInt != 2 { 16177 break 16178 } 16179 _ = v_0.Args[1] 16180 ptr := v_0.Args[0] 16181 idx := v_0.Args[1] 16182 mem := v.Args[1] 16183 if !(off == 0 && sym == nil) { 
16184 break 16185 } 16186 v.reset(OpARM64MOVWUloadidx4) 16187 v.AddArg(ptr) 16188 v.AddArg(idx) 16189 v.AddArg(mem) 16190 return true 16191 } 16192 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 16193 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16194 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 16195 for { 16196 off1 := v.AuxInt 16197 sym1 := v.Aux 16198 _ = v.Args[1] 16199 v_0 := v.Args[0] 16200 if v_0.Op != OpARM64MOVDaddr { 16201 break 16202 } 16203 off2 := v_0.AuxInt 16204 sym2 := v_0.Aux 16205 ptr := v_0.Args[0] 16206 mem := v.Args[1] 16207 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16208 break 16209 } 16210 v.reset(OpARM64MOVWUload) 16211 v.AuxInt = off1 + off2 16212 v.Aux = mergeSym(sym1, sym2) 16213 v.AddArg(ptr) 16214 v.AddArg(mem) 16215 return true 16216 } 16217 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 16218 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 16219 // result: (MOVDconst [0]) 16220 for { 16221 off := v.AuxInt 16222 sym := v.Aux 16223 _ = v.Args[1] 16224 ptr := v.Args[0] 16225 v_1 := v.Args[1] 16226 if v_1.Op != OpARM64MOVWstorezero { 16227 break 16228 } 16229 off2 := v_1.AuxInt 16230 sym2 := v_1.Aux 16231 _ = v_1.Args[1] 16232 ptr2 := v_1.Args[0] 16233 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 16234 break 16235 } 16236 v.reset(OpARM64MOVDconst) 16237 v.AuxInt = 0 16238 return true 16239 } 16240 // match: (MOVWUload [off] {sym} (SB) _) 16241 // cond: symIsRO(sym) 16242 // result: (MOVDconst [int64(read32(sym, off, config.BigEndian))]) 16243 for { 16244 off := v.AuxInt 16245 sym := v.Aux 16246 _ = v.Args[1] 16247 v_0 := v.Args[0] 16248 if v_0.Op != OpSB { 16249 break 16250 } 16251 if !(symIsRO(sym)) { 16252 break 16253 } 16254 v.reset(OpARM64MOVDconst) 16255 v.AuxInt = int64(read32(sym, off, config.BigEndian)) 16256 return true 
16257 } 16258 return false 16259 } 16260 func rewriteValueARM64_OpARM64MOVWUloadidx_0(v *Value) bool { 16261 // match: (MOVWUloadidx ptr (MOVDconst [c]) mem) 16262 // cond: 16263 // result: (MOVWUload [c] ptr mem) 16264 for { 16265 _ = v.Args[2] 16266 ptr := v.Args[0] 16267 v_1 := v.Args[1] 16268 if v_1.Op != OpARM64MOVDconst { 16269 break 16270 } 16271 c := v_1.AuxInt 16272 mem := v.Args[2] 16273 v.reset(OpARM64MOVWUload) 16274 v.AuxInt = c 16275 v.AddArg(ptr) 16276 v.AddArg(mem) 16277 return true 16278 } 16279 // match: (MOVWUloadidx (MOVDconst [c]) ptr mem) 16280 // cond: 16281 // result: (MOVWUload [c] ptr mem) 16282 for { 16283 _ = v.Args[2] 16284 v_0 := v.Args[0] 16285 if v_0.Op != OpARM64MOVDconst { 16286 break 16287 } 16288 c := v_0.AuxInt 16289 ptr := v.Args[1] 16290 mem := v.Args[2] 16291 v.reset(OpARM64MOVWUload) 16292 v.AuxInt = c 16293 v.AddArg(ptr) 16294 v.AddArg(mem) 16295 return true 16296 } 16297 // match: (MOVWUloadidx ptr (SLLconst [2] idx) mem) 16298 // cond: 16299 // result: (MOVWUloadidx4 ptr idx mem) 16300 for { 16301 _ = v.Args[2] 16302 ptr := v.Args[0] 16303 v_1 := v.Args[1] 16304 if v_1.Op != OpARM64SLLconst { 16305 break 16306 } 16307 if v_1.AuxInt != 2 { 16308 break 16309 } 16310 idx := v_1.Args[0] 16311 mem := v.Args[2] 16312 v.reset(OpARM64MOVWUloadidx4) 16313 v.AddArg(ptr) 16314 v.AddArg(idx) 16315 v.AddArg(mem) 16316 return true 16317 } 16318 // match: (MOVWUloadidx (SLLconst [2] idx) ptr mem) 16319 // cond: 16320 // result: (MOVWUloadidx4 ptr idx mem) 16321 for { 16322 _ = v.Args[2] 16323 v_0 := v.Args[0] 16324 if v_0.Op != OpARM64SLLconst { 16325 break 16326 } 16327 if v_0.AuxInt != 2 { 16328 break 16329 } 16330 idx := v_0.Args[0] 16331 ptr := v.Args[1] 16332 mem := v.Args[2] 16333 v.reset(OpARM64MOVWUloadidx4) 16334 v.AddArg(ptr) 16335 v.AddArg(idx) 16336 v.AddArg(mem) 16337 return true 16338 } 16339 // match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 16340 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || 
isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 16341 // result: (MOVDconst [0]) 16342 for { 16343 _ = v.Args[2] 16344 ptr := v.Args[0] 16345 idx := v.Args[1] 16346 v_2 := v.Args[2] 16347 if v_2.Op != OpARM64MOVWstorezeroidx { 16348 break 16349 } 16350 _ = v_2.Args[2] 16351 ptr2 := v_2.Args[0] 16352 idx2 := v_2.Args[1] 16353 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 16354 break 16355 } 16356 v.reset(OpARM64MOVDconst) 16357 v.AuxInt = 0 16358 return true 16359 } 16360 return false 16361 } 16362 func rewriteValueARM64_OpARM64MOVWUloadidx4_0(v *Value) bool { 16363 // match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem) 16364 // cond: 16365 // result: (MOVWUload [c<<2] ptr mem) 16366 for { 16367 _ = v.Args[2] 16368 ptr := v.Args[0] 16369 v_1 := v.Args[1] 16370 if v_1.Op != OpARM64MOVDconst { 16371 break 16372 } 16373 c := v_1.AuxInt 16374 mem := v.Args[2] 16375 v.reset(OpARM64MOVWUload) 16376 v.AuxInt = c << 2 16377 v.AddArg(ptr) 16378 v.AddArg(mem) 16379 return true 16380 } 16381 // match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 16382 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 16383 // result: (MOVDconst [0]) 16384 for { 16385 _ = v.Args[2] 16386 ptr := v.Args[0] 16387 idx := v.Args[1] 16388 v_2 := v.Args[2] 16389 if v_2.Op != OpARM64MOVWstorezeroidx4 { 16390 break 16391 } 16392 _ = v_2.Args[2] 16393 ptr2 := v_2.Args[0] 16394 idx2 := v_2.Args[1] 16395 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 16396 break 16397 } 16398 v.reset(OpARM64MOVDconst) 16399 v.AuxInt = 0 16400 return true 16401 } 16402 return false 16403 } 16404 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 16405 // match: (MOVWUreg x:(MOVBUload _ _)) 16406 // cond: 16407 // result: (MOVDreg x) 16408 for { 16409 x := v.Args[0] 16410 if x.Op != OpARM64MOVBUload { 16411 break 16412 } 16413 _ = x.Args[1] 16414 v.reset(OpARM64MOVDreg) 16415 v.AddArg(x) 16416 return true 16417 } 16418 // match: (MOVWUreg 
x:(MOVHUload _ _)) 16419 // cond: 16420 // result: (MOVDreg x) 16421 for { 16422 x := v.Args[0] 16423 if x.Op != OpARM64MOVHUload { 16424 break 16425 } 16426 _ = x.Args[1] 16427 v.reset(OpARM64MOVDreg) 16428 v.AddArg(x) 16429 return true 16430 } 16431 // match: (MOVWUreg x:(MOVWUload _ _)) 16432 // cond: 16433 // result: (MOVDreg x) 16434 for { 16435 x := v.Args[0] 16436 if x.Op != OpARM64MOVWUload { 16437 break 16438 } 16439 _ = x.Args[1] 16440 v.reset(OpARM64MOVDreg) 16441 v.AddArg(x) 16442 return true 16443 } 16444 // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) 16445 // cond: 16446 // result: (MOVDreg x) 16447 for { 16448 x := v.Args[0] 16449 if x.Op != OpARM64MOVBUloadidx { 16450 break 16451 } 16452 _ = x.Args[2] 16453 v.reset(OpARM64MOVDreg) 16454 v.AddArg(x) 16455 return true 16456 } 16457 // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) 16458 // cond: 16459 // result: (MOVDreg x) 16460 for { 16461 x := v.Args[0] 16462 if x.Op != OpARM64MOVHUloadidx { 16463 break 16464 } 16465 _ = x.Args[2] 16466 v.reset(OpARM64MOVDreg) 16467 v.AddArg(x) 16468 return true 16469 } 16470 // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) 16471 // cond: 16472 // result: (MOVDreg x) 16473 for { 16474 x := v.Args[0] 16475 if x.Op != OpARM64MOVWUloadidx { 16476 break 16477 } 16478 _ = x.Args[2] 16479 v.reset(OpARM64MOVDreg) 16480 v.AddArg(x) 16481 return true 16482 } 16483 // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) 16484 // cond: 16485 // result: (MOVDreg x) 16486 for { 16487 x := v.Args[0] 16488 if x.Op != OpARM64MOVHUloadidx2 { 16489 break 16490 } 16491 _ = x.Args[2] 16492 v.reset(OpARM64MOVDreg) 16493 v.AddArg(x) 16494 return true 16495 } 16496 // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) 16497 // cond: 16498 // result: (MOVDreg x) 16499 for { 16500 x := v.Args[0] 16501 if x.Op != OpARM64MOVWUloadidx4 { 16502 break 16503 } 16504 _ = x.Args[2] 16505 v.reset(OpARM64MOVDreg) 16506 v.AddArg(x) 16507 return true 16508 } 16509 // match: (MOVWUreg x:(MOVBUreg _)) 16510 // cond: 16511 // result: 
(MOVDreg x) 16512 for { 16513 x := v.Args[0] 16514 if x.Op != OpARM64MOVBUreg { 16515 break 16516 } 16517 v.reset(OpARM64MOVDreg) 16518 v.AddArg(x) 16519 return true 16520 } 16521 // match: (MOVWUreg x:(MOVHUreg _)) 16522 // cond: 16523 // result: (MOVDreg x) 16524 for { 16525 x := v.Args[0] 16526 if x.Op != OpARM64MOVHUreg { 16527 break 16528 } 16529 v.reset(OpARM64MOVDreg) 16530 v.AddArg(x) 16531 return true 16532 } 16533 return false 16534 } 16535 func rewriteValueARM64_OpARM64MOVWUreg_10(v *Value) bool { 16536 // match: (MOVWUreg x:(MOVWUreg _)) 16537 // cond: 16538 // result: (MOVDreg x) 16539 for { 16540 x := v.Args[0] 16541 if x.Op != OpARM64MOVWUreg { 16542 break 16543 } 16544 v.reset(OpARM64MOVDreg) 16545 v.AddArg(x) 16546 return true 16547 } 16548 // match: (MOVWUreg (ANDconst [c] x)) 16549 // cond: 16550 // result: (ANDconst [c&(1<<32-1)] x) 16551 for { 16552 v_0 := v.Args[0] 16553 if v_0.Op != OpARM64ANDconst { 16554 break 16555 } 16556 c := v_0.AuxInt 16557 x := v_0.Args[0] 16558 v.reset(OpARM64ANDconst) 16559 v.AuxInt = c & (1<<32 - 1) 16560 v.AddArg(x) 16561 return true 16562 } 16563 // match: (MOVWUreg (MOVDconst [c])) 16564 // cond: 16565 // result: (MOVDconst [int64(uint32(c))]) 16566 for { 16567 v_0 := v.Args[0] 16568 if v_0.Op != OpARM64MOVDconst { 16569 break 16570 } 16571 c := v_0.AuxInt 16572 v.reset(OpARM64MOVDconst) 16573 v.AuxInt = int64(uint32(c)) 16574 return true 16575 } 16576 // match: (MOVWUreg (SLLconst [sc] x)) 16577 // cond: isARM64BFMask(sc, 1<<32-1, sc) 16578 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x) 16579 for { 16580 v_0 := v.Args[0] 16581 if v_0.Op != OpARM64SLLconst { 16582 break 16583 } 16584 sc := v_0.AuxInt 16585 x := v_0.Args[0] 16586 if !(isARM64BFMask(sc, 1<<32-1, sc)) { 16587 break 16588 } 16589 v.reset(OpARM64UBFIZ) 16590 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc)) 16591 v.AddArg(x) 16592 return true 16593 } 16594 // match: (MOVWUreg (SRLconst [sc] x)) 16595 // cond: 
isARM64BFMask(sc, 1<<32-1, 0) 16596 // result: (UBFX [arm64BFAuxInt(sc, 32)] x) 16597 for { 16598 v_0 := v.Args[0] 16599 if v_0.Op != OpARM64SRLconst { 16600 break 16601 } 16602 sc := v_0.AuxInt 16603 x := v_0.Args[0] 16604 if !(isARM64BFMask(sc, 1<<32-1, 0)) { 16605 break 16606 } 16607 v.reset(OpARM64UBFX) 16608 v.AuxInt = arm64BFAuxInt(sc, 32) 16609 v.AddArg(x) 16610 return true 16611 } 16612 return false 16613 } 16614 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 16615 b := v.Block 16616 _ = b 16617 config := b.Func.Config 16618 _ = config 16619 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 16620 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16621 // result: (MOVWload [off1+off2] {sym} ptr mem) 16622 for { 16623 off1 := v.AuxInt 16624 sym := v.Aux 16625 _ = v.Args[1] 16626 v_0 := v.Args[0] 16627 if v_0.Op != OpARM64ADDconst { 16628 break 16629 } 16630 off2 := v_0.AuxInt 16631 ptr := v_0.Args[0] 16632 mem := v.Args[1] 16633 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16634 break 16635 } 16636 v.reset(OpARM64MOVWload) 16637 v.AuxInt = off1 + off2 16638 v.Aux = sym 16639 v.AddArg(ptr) 16640 v.AddArg(mem) 16641 return true 16642 } 16643 // match: (MOVWload [off] {sym} (ADD ptr idx) mem) 16644 // cond: off == 0 && sym == nil 16645 // result: (MOVWloadidx ptr idx mem) 16646 for { 16647 off := v.AuxInt 16648 sym := v.Aux 16649 _ = v.Args[1] 16650 v_0 := v.Args[0] 16651 if v_0.Op != OpARM64ADD { 16652 break 16653 } 16654 _ = v_0.Args[1] 16655 ptr := v_0.Args[0] 16656 idx := v_0.Args[1] 16657 mem := v.Args[1] 16658 if !(off == 0 && sym == nil) { 16659 break 16660 } 16661 v.reset(OpARM64MOVWloadidx) 16662 v.AddArg(ptr) 16663 v.AddArg(idx) 16664 v.AddArg(mem) 16665 return true 16666 } 16667 // match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 16668 // cond: off == 0 && sym == nil 16669 // result: (MOVWloadidx4 ptr idx mem) 16670 for { 16671 off := v.AuxInt 16672 sym := v.Aux 
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWloadidx_0 rewrites MOVWloadidx (register-indexed
// 32-bit load): a constant index folds into a MOVWload offset, an index scaled
// by SLLconst [2] becomes the scaled form MOVWloadidx4, and a load that matches
// a preceding MOVWstorezeroidx of the same ptr/idx folds to constant zero.
func rewriteValueARM64_OpARM64MOVWloadidx_0(v *Value) bool {
	// match: (MOVWloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVWload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVWload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVWload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWloadidx ptr (SLLconst [2] idx) mem)
	// cond:
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 2 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWloadidx (SLLconst [2] idx) ptr mem)
	// cond:
	// result: (MOVWloadidx4 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWloadidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWloadidx4_0 rewrites MOVWloadidx4 (4-byte-scaled
// indexed 32-bit load): a constant index folds into a MOVWload offset (scaled
// by 4 via c<<2), and a load matching a preceding MOVWstorezeroidx4 of the
// same ptr/idx folds to constant zero.
func rewriteValueARM64_OpARM64MOVWloadidx4_0(v *Value) bool {
	// match: (MOVWloadidx4 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVWload [c<<2] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVWload)
		v.AuxInt = c << 2
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWreg_0 rewrites MOVWreg (move with word
// extension): when the argument is a load of width 32 bits or less, the
// extension is dropped in favor of a plain MOVDreg copy.
func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool {
	// match: (MOVWreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWreg_10 continues the MOVWreg rules: scaled
// indexed loads and narrower extension ops also make the MOVWreg redundant
// (plain MOVDreg copy), constants fold with sign extension to 32 bits
// (int64(int32(c))), and MOVWreg of a small left shift becomes a signed
// bitfield insert (SBFIZ).
func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	// NOTE(review): this rule is an exact duplicate of the previous one and can
	// never fire; it is emitted this way by the rule generator (gen/ARM64.rules)
	// and is kept verbatim because this file is generated — DO NOT EDIT by hand.
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	// match: (MOVWreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (SBFIZ [arm64BFAuxInt(lc, 32-lc)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(lc, 32-lc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstore_0 rewrites MOVWstore (32-bit store):
// a GP->FP move of the value becomes FMOVSstore, constant offsets and symbols
// fold into the store's AuxInt/Aux, (ADD ptr idx) and (ADDshiftLL [2] ptr idx)
// addresses become indexed stores, storing constant zero becomes
// MOVWstorezero, redundant MOVWreg/MOVWUreg extensions of the stored value are
// dropped, and an adjacent pair storing both halves of one 64-bit value
// (upper half via SRLconst [32]) merges into a single MOVDstore/MOVDstoreidx.
func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
	// cond:
	// result: (FMOVSstore [off] {sym} ptr val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVSfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [i] {s} ptr0 (SRLconst [32] w) x:(MOVWstore [i-4] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVDstore [i-4] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 32 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstore {
			break
		}
		if x.AuxInt != i-4 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = i - 4
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVDstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 32 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstore_10 continues the MOVWstore rules: further
// patterns that detect two adjacent 32-bit stores whose values are the two
// halves of one 64-bit value (via SRLconst [32] / SRLconst [j]-[j-32] shift
// pairs of the same source w) and merge them into a single MOVDstore or
// MOVDstoreidx; for the MOVWstoreidx4 forms the scaled index is rebuilt as
// (SLLconst [2] idx1) since MOVDstoreidx takes a byte index.
func rewriteValueARM64_OpARM64MOVWstore_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 32 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstoreidx4 {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr1)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = 2
		v0.AddArg(idx1)
		v.AddArg(v0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVWstore [i-4] {s} ptr1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVDstore [i-4] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstore {
			break
		}
		if x.AuxInt != i-4 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-32 {
			break
		}
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = i - 4
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx ptr1 idx1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVDstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-32 {
			break
		}
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx4 ptr1 idx1 w0:(SRLconst [j-32] w) mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w0 mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVWstoreidx4 {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-32 {
			break
		}
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr1)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type)
		v0.AuxInt = 2
		v0.AddArg(idx1)
		v.AddArg(v0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstoreidx_0 rewrites MOVWstoreidx
// (register-indexed 32-bit store): constant indices fold into a MOVWstore
// offset, SLLconst [2] indices become the scaled form MOVWstoreidx4, storing
// constant zero becomes MOVWstorezeroidx, redundant MOVWreg/MOVWUreg
// extensions of the stored value are dropped, and an adjacent pair storing
// both halves of one 64-bit value merges into MOVDstoreidx.
func rewriteValueARM64_OpARM64MOVWstoreidx_0(v *Value) bool {
	// match: (MOVWstoreidx ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVWstore [c] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx (MOVDconst [c]) idx val mem)
	// cond:
	// result: (MOVWstore [c] idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem)
	// cond:
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 2 {
			break
		}
		idx := v_1.Args[0]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx (SLLconst [2] idx) ptr val mem)
	// cond:
	// result: (MOVWstoreidx4 ptr idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 2 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVWstorezeroidx ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVWstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWreg x) mem)
	// cond:
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx ptr idx (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstoreidx ptr idx x mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx ptr (ADDconst [4] idx) (SRLconst [32] w) x:(MOVWstoreidx ptr idx w mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVDstoreidx ptr idx w mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64ADDconst {
			break
		}
		if v_1.AuxInt != 4 {
			break
		}
		idx := v_1.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64SRLconst {
			break
		}
		if v_2.AuxInt != 32 {
			break
		}
		w := v_2.Args[0]
		x := v.Args[3]
		if x.Op != OpARM64MOVWstoreidx {
			break
		}
		_ = x.Args[3]
		if ptr != x.Args[0] {
			break
		}
		if idx != x.Args[1] {
			break
		}
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstoreidx4_0 rewrites MOVWstoreidx4
// (4-byte-scaled indexed 32-bit store): a constant index folds into a
// MOVWstore offset (scaled by 4 via c<<2), storing constant zero becomes
// MOVWstorezeroidx4, and redundant MOVWreg/MOVWUreg extensions of the stored
// value are dropped.
func rewriteValueARM64_OpARM64MOVWstoreidx4_0(v *Value) bool {
	// match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVWstore [c<<2] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = c << 2
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVWstorezeroidx4 ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVWstorezeroidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem)
	// cond:
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWreg {
			break
		}
		x := v_2.Args[0]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem)
	// cond:
	// result: (MOVWstoreidx4 ptr idx x mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWUreg {
			break
		}
		x := v_2.Args[0]
		mem := v.Args[3]
		v.reset(OpARM64MOVWstoreidx4)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstorezero_0 rewrites MOVWstorezero (store of a
// 32-bit zero): constant offsets and symbols fold into the store's
// AuxInt/Aux, and (ADD ptr idx) / (ADDshiftLL [2] ptr idx) addresses become
// the indexed zero-store forms; later rules merge adjacent zero stores into
// 64-bit MOVDstorezero forms.
func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVWstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
// result: (MOVWstorezeroidx ptr idx mem) 17994 for { 17995 off := v.AuxInt 17996 sym := v.Aux 17997 _ = v.Args[1] 17998 v_0 := v.Args[0] 17999 if v_0.Op != OpARM64ADD { 18000 break 18001 } 18002 _ = v_0.Args[1] 18003 ptr := v_0.Args[0] 18004 idx := v_0.Args[1] 18005 mem := v.Args[1] 18006 if !(off == 0 && sym == nil) { 18007 break 18008 } 18009 v.reset(OpARM64MOVWstorezeroidx) 18010 v.AddArg(ptr) 18011 v.AddArg(idx) 18012 v.AddArg(mem) 18013 return true 18014 } 18015 // match: (MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem) 18016 // cond: off == 0 && sym == nil 18017 // result: (MOVWstorezeroidx4 ptr idx mem) 18018 for { 18019 off := v.AuxInt 18020 sym := v.Aux 18021 _ = v.Args[1] 18022 v_0 := v.Args[0] 18023 if v_0.Op != OpARM64ADDshiftLL { 18024 break 18025 } 18026 if v_0.AuxInt != 2 { 18027 break 18028 } 18029 _ = v_0.Args[1] 18030 ptr := v_0.Args[0] 18031 idx := v_0.Args[1] 18032 mem := v.Args[1] 18033 if !(off == 0 && sym == nil) { 18034 break 18035 } 18036 v.reset(OpARM64MOVWstorezeroidx4) 18037 v.AddArg(ptr) 18038 v.AddArg(idx) 18039 v.AddArg(mem) 18040 return true 18041 } 18042 // match: (MOVWstorezero [i] {s} ptr0 x:(MOVWstorezero [j] {s} ptr1 mem)) 18043 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,4) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 18044 // result: (MOVDstorezero [min(i,j)] {s} ptr0 mem) 18045 for { 18046 i := v.AuxInt 18047 s := v.Aux 18048 _ = v.Args[1] 18049 ptr0 := v.Args[0] 18050 x := v.Args[1] 18051 if x.Op != OpARM64MOVWstorezero { 18052 break 18053 } 18054 j := x.AuxInt 18055 if x.Aux != s { 18056 break 18057 } 18058 _ = x.Args[1] 18059 ptr1 := x.Args[0] 18060 mem := x.Args[1] 18061 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 4) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 18062 break 18063 } 18064 v.reset(OpARM64MOVDstorezero) 18065 v.AuxInt = min(i, j) 18066 v.Aux = s 18067 v.AddArg(ptr0) 18068 v.AddArg(mem) 18069 return true 18070 } 18071 // match: (MOVWstorezero [4] {s} (ADD ptr0 
idx0) x:(MOVWstorezeroidx ptr1 idx1 mem)) 18072 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 18073 // result: (MOVDstorezeroidx ptr1 idx1 mem) 18074 for { 18075 if v.AuxInt != 4 { 18076 break 18077 } 18078 s := v.Aux 18079 _ = v.Args[1] 18080 v_0 := v.Args[0] 18081 if v_0.Op != OpARM64ADD { 18082 break 18083 } 18084 _ = v_0.Args[1] 18085 ptr0 := v_0.Args[0] 18086 idx0 := v_0.Args[1] 18087 x := v.Args[1] 18088 if x.Op != OpARM64MOVWstorezeroidx { 18089 break 18090 } 18091 _ = x.Args[2] 18092 ptr1 := x.Args[0] 18093 idx1 := x.Args[1] 18094 mem := x.Args[2] 18095 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 18096 break 18097 } 18098 v.reset(OpARM64MOVDstorezeroidx) 18099 v.AddArg(ptr1) 18100 v.AddArg(idx1) 18101 v.AddArg(mem) 18102 return true 18103 } 18104 // match: (MOVWstorezero [4] {s} (ADDshiftLL [2] ptr0 idx0) x:(MOVWstorezeroidx4 ptr1 idx1 mem)) 18105 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 18106 // result: (MOVDstorezeroidx ptr1 (SLLconst <idx1.Type> [2] idx1) mem) 18107 for { 18108 if v.AuxInt != 4 { 18109 break 18110 } 18111 s := v.Aux 18112 _ = v.Args[1] 18113 v_0 := v.Args[0] 18114 if v_0.Op != OpARM64ADDshiftLL { 18115 break 18116 } 18117 if v_0.AuxInt != 2 { 18118 break 18119 } 18120 _ = v_0.Args[1] 18121 ptr0 := v_0.Args[0] 18122 idx0 := v_0.Args[1] 18123 x := v.Args[1] 18124 if x.Op != OpARM64MOVWstorezeroidx4 { 18125 break 18126 } 18127 _ = x.Args[2] 18128 ptr1 := x.Args[0] 18129 idx1 := x.Args[1] 18130 mem := x.Args[2] 18131 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 18132 break 18133 } 18134 v.reset(OpARM64MOVDstorezeroidx) 18135 v.AddArg(ptr1) 18136 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 18137 v0.AuxInt = 2 18138 
v0.AddArg(idx1) 18139 v.AddArg(v0) 18140 v.AddArg(mem) 18141 return true 18142 } 18143 return false 18144 } 18145 func rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v *Value) bool { 18146 // match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem) 18147 // cond: 18148 // result: (MOVWstorezero [c] ptr mem) 18149 for { 18150 _ = v.Args[2] 18151 ptr := v.Args[0] 18152 v_1 := v.Args[1] 18153 if v_1.Op != OpARM64MOVDconst { 18154 break 18155 } 18156 c := v_1.AuxInt 18157 mem := v.Args[2] 18158 v.reset(OpARM64MOVWstorezero) 18159 v.AuxInt = c 18160 v.AddArg(ptr) 18161 v.AddArg(mem) 18162 return true 18163 } 18164 // match: (MOVWstorezeroidx (MOVDconst [c]) idx mem) 18165 // cond: 18166 // result: (MOVWstorezero [c] idx mem) 18167 for { 18168 _ = v.Args[2] 18169 v_0 := v.Args[0] 18170 if v_0.Op != OpARM64MOVDconst { 18171 break 18172 } 18173 c := v_0.AuxInt 18174 idx := v.Args[1] 18175 mem := v.Args[2] 18176 v.reset(OpARM64MOVWstorezero) 18177 v.AuxInt = c 18178 v.AddArg(idx) 18179 v.AddArg(mem) 18180 return true 18181 } 18182 // match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem) 18183 // cond: 18184 // result: (MOVWstorezeroidx4 ptr idx mem) 18185 for { 18186 _ = v.Args[2] 18187 ptr := v.Args[0] 18188 v_1 := v.Args[1] 18189 if v_1.Op != OpARM64SLLconst { 18190 break 18191 } 18192 if v_1.AuxInt != 2 { 18193 break 18194 } 18195 idx := v_1.Args[0] 18196 mem := v.Args[2] 18197 v.reset(OpARM64MOVWstorezeroidx4) 18198 v.AddArg(ptr) 18199 v.AddArg(idx) 18200 v.AddArg(mem) 18201 return true 18202 } 18203 // match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem) 18204 // cond: 18205 // result: (MOVWstorezeroidx4 ptr idx mem) 18206 for { 18207 _ = v.Args[2] 18208 v_0 := v.Args[0] 18209 if v_0.Op != OpARM64SLLconst { 18210 break 18211 } 18212 if v_0.AuxInt != 2 { 18213 break 18214 } 18215 idx := v_0.Args[0] 18216 ptr := v.Args[1] 18217 mem := v.Args[2] 18218 v.reset(OpARM64MOVWstorezeroidx4) 18219 v.AddArg(ptr) 18220 v.AddArg(idx) 18221 v.AddArg(mem) 18222 return true 18223 } 18224 // 
match: (MOVWstorezeroidx ptr (ADDconst [4] idx) x:(MOVWstorezeroidx ptr idx mem)) 18225 // cond: x.Uses == 1 && clobber(x) 18226 // result: (MOVDstorezeroidx ptr idx mem) 18227 for { 18228 _ = v.Args[2] 18229 ptr := v.Args[0] 18230 v_1 := v.Args[1] 18231 if v_1.Op != OpARM64ADDconst { 18232 break 18233 } 18234 if v_1.AuxInt != 4 { 18235 break 18236 } 18237 idx := v_1.Args[0] 18238 x := v.Args[2] 18239 if x.Op != OpARM64MOVWstorezeroidx { 18240 break 18241 } 18242 _ = x.Args[2] 18243 if ptr != x.Args[0] { 18244 break 18245 } 18246 if idx != x.Args[1] { 18247 break 18248 } 18249 mem := x.Args[2] 18250 if !(x.Uses == 1 && clobber(x)) { 18251 break 18252 } 18253 v.reset(OpARM64MOVDstorezeroidx) 18254 v.AddArg(ptr) 18255 v.AddArg(idx) 18256 v.AddArg(mem) 18257 return true 18258 } 18259 return false 18260 } 18261 func rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v *Value) bool { 18262 // match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem) 18263 // cond: 18264 // result: (MOVWstorezero [c<<2] ptr mem) 18265 for { 18266 _ = v.Args[2] 18267 ptr := v.Args[0] 18268 v_1 := v.Args[1] 18269 if v_1.Op != OpARM64MOVDconst { 18270 break 18271 } 18272 c := v_1.AuxInt 18273 mem := v.Args[2] 18274 v.reset(OpARM64MOVWstorezero) 18275 v.AuxInt = c << 2 18276 v.AddArg(ptr) 18277 v.AddArg(mem) 18278 return true 18279 } 18280 return false 18281 } 18282 func rewriteValueARM64_OpARM64MSUB_0(v *Value) bool { 18283 b := v.Block 18284 _ = b 18285 // match: (MSUB a x (MOVDconst [-1])) 18286 // cond: 18287 // result: (ADD a x) 18288 for { 18289 _ = v.Args[2] 18290 a := v.Args[0] 18291 x := v.Args[1] 18292 v_2 := v.Args[2] 18293 if v_2.Op != OpARM64MOVDconst { 18294 break 18295 } 18296 if v_2.AuxInt != -1 { 18297 break 18298 } 18299 v.reset(OpARM64ADD) 18300 v.AddArg(a) 18301 v.AddArg(x) 18302 return true 18303 } 18304 // match: (MSUB a _ (MOVDconst [0])) 18305 // cond: 18306 // result: a 18307 for { 18308 _ = v.Args[2] 18309 a := v.Args[0] 18310 v_2 := v.Args[2] 18311 if v_2.Op != OpARM64MOVDconst 
{ 18312 break 18313 } 18314 if v_2.AuxInt != 0 { 18315 break 18316 } 18317 v.reset(OpCopy) 18318 v.Type = a.Type 18319 v.AddArg(a) 18320 return true 18321 } 18322 // match: (MSUB a x (MOVDconst [1])) 18323 // cond: 18324 // result: (SUB a x) 18325 for { 18326 _ = v.Args[2] 18327 a := v.Args[0] 18328 x := v.Args[1] 18329 v_2 := v.Args[2] 18330 if v_2.Op != OpARM64MOVDconst { 18331 break 18332 } 18333 if v_2.AuxInt != 1 { 18334 break 18335 } 18336 v.reset(OpARM64SUB) 18337 v.AddArg(a) 18338 v.AddArg(x) 18339 return true 18340 } 18341 // match: (MSUB a x (MOVDconst [c])) 18342 // cond: isPowerOfTwo(c) 18343 // result: (SUBshiftLL a x [log2(c)]) 18344 for { 18345 _ = v.Args[2] 18346 a := v.Args[0] 18347 x := v.Args[1] 18348 v_2 := v.Args[2] 18349 if v_2.Op != OpARM64MOVDconst { 18350 break 18351 } 18352 c := v_2.AuxInt 18353 if !(isPowerOfTwo(c)) { 18354 break 18355 } 18356 v.reset(OpARM64SUBshiftLL) 18357 v.AuxInt = log2(c) 18358 v.AddArg(a) 18359 v.AddArg(x) 18360 return true 18361 } 18362 // match: (MSUB a x (MOVDconst [c])) 18363 // cond: isPowerOfTwo(c-1) && c>=3 18364 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18365 for { 18366 _ = v.Args[2] 18367 a := v.Args[0] 18368 x := v.Args[1] 18369 v_2 := v.Args[2] 18370 if v_2.Op != OpARM64MOVDconst { 18371 break 18372 } 18373 c := v_2.AuxInt 18374 if !(isPowerOfTwo(c-1) && c >= 3) { 18375 break 18376 } 18377 v.reset(OpARM64SUB) 18378 v.AddArg(a) 18379 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18380 v0.AuxInt = log2(c - 1) 18381 v0.AddArg(x) 18382 v0.AddArg(x) 18383 v.AddArg(v0) 18384 return true 18385 } 18386 // match: (MSUB a x (MOVDconst [c])) 18387 // cond: isPowerOfTwo(c+1) && c>=7 18388 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18389 for { 18390 _ = v.Args[2] 18391 a := v.Args[0] 18392 x := v.Args[1] 18393 v_2 := v.Args[2] 18394 if v_2.Op != OpARM64MOVDconst { 18395 break 18396 } 18397 c := v_2.AuxInt 18398 if !(isPowerOfTwo(c+1) && c >= 7) { 18399 break 18400 } 18401 
v.reset(OpARM64ADD) 18402 v.AddArg(a) 18403 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18404 v0.AuxInt = log2(c + 1) 18405 v0.AddArg(x) 18406 v0.AddArg(x) 18407 v.AddArg(v0) 18408 return true 18409 } 18410 // match: (MSUB a x (MOVDconst [c])) 18411 // cond: c%3 == 0 && isPowerOfTwo(c/3) 18412 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18413 for { 18414 _ = v.Args[2] 18415 a := v.Args[0] 18416 x := v.Args[1] 18417 v_2 := v.Args[2] 18418 if v_2.Op != OpARM64MOVDconst { 18419 break 18420 } 18421 c := v_2.AuxInt 18422 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 18423 break 18424 } 18425 v.reset(OpARM64ADDshiftLL) 18426 v.AuxInt = log2(c / 3) 18427 v.AddArg(a) 18428 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18429 v0.AuxInt = 2 18430 v0.AddArg(x) 18431 v0.AddArg(x) 18432 v.AddArg(v0) 18433 return true 18434 } 18435 // match: (MSUB a x (MOVDconst [c])) 18436 // cond: c%5 == 0 && isPowerOfTwo(c/5) 18437 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18438 for { 18439 _ = v.Args[2] 18440 a := v.Args[0] 18441 x := v.Args[1] 18442 v_2 := v.Args[2] 18443 if v_2.Op != OpARM64MOVDconst { 18444 break 18445 } 18446 c := v_2.AuxInt 18447 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 18448 break 18449 } 18450 v.reset(OpARM64SUBshiftLL) 18451 v.AuxInt = log2(c / 5) 18452 v.AddArg(a) 18453 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18454 v0.AuxInt = 2 18455 v0.AddArg(x) 18456 v0.AddArg(x) 18457 v.AddArg(v0) 18458 return true 18459 } 18460 // match: (MSUB a x (MOVDconst [c])) 18461 // cond: c%7 == 0 && isPowerOfTwo(c/7) 18462 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18463 for { 18464 _ = v.Args[2] 18465 a := v.Args[0] 18466 x := v.Args[1] 18467 v_2 := v.Args[2] 18468 if v_2.Op != OpARM64MOVDconst { 18469 break 18470 } 18471 c := v_2.AuxInt 18472 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 18473 break 18474 } 18475 v.reset(OpARM64ADDshiftLL) 18476 v.AuxInt = log2(c / 7) 18477 v.AddArg(a) 18478 v0 := 
b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18479 v0.AuxInt = 3 18480 v0.AddArg(x) 18481 v0.AddArg(x) 18482 v.AddArg(v0) 18483 return true 18484 } 18485 // match: (MSUB a x (MOVDconst [c])) 18486 // cond: c%9 == 0 && isPowerOfTwo(c/9) 18487 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18488 for { 18489 _ = v.Args[2] 18490 a := v.Args[0] 18491 x := v.Args[1] 18492 v_2 := v.Args[2] 18493 if v_2.Op != OpARM64MOVDconst { 18494 break 18495 } 18496 c := v_2.AuxInt 18497 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 18498 break 18499 } 18500 v.reset(OpARM64SUBshiftLL) 18501 v.AuxInt = log2(c / 9) 18502 v.AddArg(a) 18503 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18504 v0.AuxInt = 3 18505 v0.AddArg(x) 18506 v0.AddArg(x) 18507 v.AddArg(v0) 18508 return true 18509 } 18510 return false 18511 } 18512 func rewriteValueARM64_OpARM64MSUB_10(v *Value) bool { 18513 b := v.Block 18514 _ = b 18515 // match: (MSUB a (MOVDconst [-1]) x) 18516 // cond: 18517 // result: (ADD a x) 18518 for { 18519 _ = v.Args[2] 18520 a := v.Args[0] 18521 v_1 := v.Args[1] 18522 if v_1.Op != OpARM64MOVDconst { 18523 break 18524 } 18525 if v_1.AuxInt != -1 { 18526 break 18527 } 18528 x := v.Args[2] 18529 v.reset(OpARM64ADD) 18530 v.AddArg(a) 18531 v.AddArg(x) 18532 return true 18533 } 18534 // match: (MSUB a (MOVDconst [0]) _) 18535 // cond: 18536 // result: a 18537 for { 18538 _ = v.Args[2] 18539 a := v.Args[0] 18540 v_1 := v.Args[1] 18541 if v_1.Op != OpARM64MOVDconst { 18542 break 18543 } 18544 if v_1.AuxInt != 0 { 18545 break 18546 } 18547 v.reset(OpCopy) 18548 v.Type = a.Type 18549 v.AddArg(a) 18550 return true 18551 } 18552 // match: (MSUB a (MOVDconst [1]) x) 18553 // cond: 18554 // result: (SUB a x) 18555 for { 18556 _ = v.Args[2] 18557 a := v.Args[0] 18558 v_1 := v.Args[1] 18559 if v_1.Op != OpARM64MOVDconst { 18560 break 18561 } 18562 if v_1.AuxInt != 1 { 18563 break 18564 } 18565 x := v.Args[2] 18566 v.reset(OpARM64SUB) 18567 v.AddArg(a) 18568 v.AddArg(x) 18569 return 
true 18570 } 18571 // match: (MSUB a (MOVDconst [c]) x) 18572 // cond: isPowerOfTwo(c) 18573 // result: (SUBshiftLL a x [log2(c)]) 18574 for { 18575 _ = v.Args[2] 18576 a := v.Args[0] 18577 v_1 := v.Args[1] 18578 if v_1.Op != OpARM64MOVDconst { 18579 break 18580 } 18581 c := v_1.AuxInt 18582 x := v.Args[2] 18583 if !(isPowerOfTwo(c)) { 18584 break 18585 } 18586 v.reset(OpARM64SUBshiftLL) 18587 v.AuxInt = log2(c) 18588 v.AddArg(a) 18589 v.AddArg(x) 18590 return true 18591 } 18592 // match: (MSUB a (MOVDconst [c]) x) 18593 // cond: isPowerOfTwo(c-1) && c>=3 18594 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18595 for { 18596 _ = v.Args[2] 18597 a := v.Args[0] 18598 v_1 := v.Args[1] 18599 if v_1.Op != OpARM64MOVDconst { 18600 break 18601 } 18602 c := v_1.AuxInt 18603 x := v.Args[2] 18604 if !(isPowerOfTwo(c-1) && c >= 3) { 18605 break 18606 } 18607 v.reset(OpARM64SUB) 18608 v.AddArg(a) 18609 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18610 v0.AuxInt = log2(c - 1) 18611 v0.AddArg(x) 18612 v0.AddArg(x) 18613 v.AddArg(v0) 18614 return true 18615 } 18616 // match: (MSUB a (MOVDconst [c]) x) 18617 // cond: isPowerOfTwo(c+1) && c>=7 18618 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18619 for { 18620 _ = v.Args[2] 18621 a := v.Args[0] 18622 v_1 := v.Args[1] 18623 if v_1.Op != OpARM64MOVDconst { 18624 break 18625 } 18626 c := v_1.AuxInt 18627 x := v.Args[2] 18628 if !(isPowerOfTwo(c+1) && c >= 7) { 18629 break 18630 } 18631 v.reset(OpARM64ADD) 18632 v.AddArg(a) 18633 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18634 v0.AuxInt = log2(c + 1) 18635 v0.AddArg(x) 18636 v0.AddArg(x) 18637 v.AddArg(v0) 18638 return true 18639 } 18640 // match: (MSUB a (MOVDconst [c]) x) 18641 // cond: c%3 == 0 && isPowerOfTwo(c/3) 18642 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18643 for { 18644 _ = v.Args[2] 18645 a := v.Args[0] 18646 v_1 := v.Args[1] 18647 if v_1.Op != OpARM64MOVDconst { 18648 break 18649 } 18650 c := v_1.AuxInt 
18651 x := v.Args[2] 18652 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 18653 break 18654 } 18655 v.reset(OpARM64ADDshiftLL) 18656 v.AuxInt = log2(c / 3) 18657 v.AddArg(a) 18658 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18659 v0.AuxInt = 2 18660 v0.AddArg(x) 18661 v0.AddArg(x) 18662 v.AddArg(v0) 18663 return true 18664 } 18665 // match: (MSUB a (MOVDconst [c]) x) 18666 // cond: c%5 == 0 && isPowerOfTwo(c/5) 18667 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18668 for { 18669 _ = v.Args[2] 18670 a := v.Args[0] 18671 v_1 := v.Args[1] 18672 if v_1.Op != OpARM64MOVDconst { 18673 break 18674 } 18675 c := v_1.AuxInt 18676 x := v.Args[2] 18677 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 18678 break 18679 } 18680 v.reset(OpARM64SUBshiftLL) 18681 v.AuxInt = log2(c / 5) 18682 v.AddArg(a) 18683 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18684 v0.AuxInt = 2 18685 v0.AddArg(x) 18686 v0.AddArg(x) 18687 v.AddArg(v0) 18688 return true 18689 } 18690 // match: (MSUB a (MOVDconst [c]) x) 18691 // cond: c%7 == 0 && isPowerOfTwo(c/7) 18692 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18693 for { 18694 _ = v.Args[2] 18695 a := v.Args[0] 18696 v_1 := v.Args[1] 18697 if v_1.Op != OpARM64MOVDconst { 18698 break 18699 } 18700 c := v_1.AuxInt 18701 x := v.Args[2] 18702 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 18703 break 18704 } 18705 v.reset(OpARM64ADDshiftLL) 18706 v.AuxInt = log2(c / 7) 18707 v.AddArg(a) 18708 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18709 v0.AuxInt = 3 18710 v0.AddArg(x) 18711 v0.AddArg(x) 18712 v.AddArg(v0) 18713 return true 18714 } 18715 // match: (MSUB a (MOVDconst [c]) x) 18716 // cond: c%9 == 0 && isPowerOfTwo(c/9) 18717 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18718 for { 18719 _ = v.Args[2] 18720 a := v.Args[0] 18721 v_1 := v.Args[1] 18722 if v_1.Op != OpARM64MOVDconst { 18723 break 18724 } 18725 c := v_1.AuxInt 18726 x := v.Args[2] 18727 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 
18728 break 18729 } 18730 v.reset(OpARM64SUBshiftLL) 18731 v.AuxInt = log2(c / 9) 18732 v.AddArg(a) 18733 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18734 v0.AuxInt = 3 18735 v0.AddArg(x) 18736 v0.AddArg(x) 18737 v.AddArg(v0) 18738 return true 18739 } 18740 return false 18741 } 18742 func rewriteValueARM64_OpARM64MSUB_20(v *Value) bool { 18743 b := v.Block 18744 _ = b 18745 // match: (MSUB (MOVDconst [c]) x y) 18746 // cond: 18747 // result: (ADDconst [c] (MNEG <x.Type> x y)) 18748 for { 18749 _ = v.Args[2] 18750 v_0 := v.Args[0] 18751 if v_0.Op != OpARM64MOVDconst { 18752 break 18753 } 18754 c := v_0.AuxInt 18755 x := v.Args[1] 18756 y := v.Args[2] 18757 v.reset(OpARM64ADDconst) 18758 v.AuxInt = c 18759 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type) 18760 v0.AddArg(x) 18761 v0.AddArg(y) 18762 v.AddArg(v0) 18763 return true 18764 } 18765 // match: (MSUB a (MOVDconst [c]) (MOVDconst [d])) 18766 // cond: 18767 // result: (SUBconst [c*d] a) 18768 for { 18769 _ = v.Args[2] 18770 a := v.Args[0] 18771 v_1 := v.Args[1] 18772 if v_1.Op != OpARM64MOVDconst { 18773 break 18774 } 18775 c := v_1.AuxInt 18776 v_2 := v.Args[2] 18777 if v_2.Op != OpARM64MOVDconst { 18778 break 18779 } 18780 d := v_2.AuxInt 18781 v.reset(OpARM64SUBconst) 18782 v.AuxInt = c * d 18783 v.AddArg(a) 18784 return true 18785 } 18786 return false 18787 } 18788 func rewriteValueARM64_OpARM64MSUBW_0(v *Value) bool { 18789 b := v.Block 18790 _ = b 18791 // match: (MSUBW a x (MOVDconst [c])) 18792 // cond: int32(c)==-1 18793 // result: (ADD a x) 18794 for { 18795 _ = v.Args[2] 18796 a := v.Args[0] 18797 x := v.Args[1] 18798 v_2 := v.Args[2] 18799 if v_2.Op != OpARM64MOVDconst { 18800 break 18801 } 18802 c := v_2.AuxInt 18803 if !(int32(c) == -1) { 18804 break 18805 } 18806 v.reset(OpARM64ADD) 18807 v.AddArg(a) 18808 v.AddArg(x) 18809 return true 18810 } 18811 // match: (MSUBW a _ (MOVDconst [c])) 18812 // cond: int32(c)==0 18813 // result: a 18814 for { 18815 _ = v.Args[2] 18816 a := v.Args[0] 18817 
v_2 := v.Args[2] 18818 if v_2.Op != OpARM64MOVDconst { 18819 break 18820 } 18821 c := v_2.AuxInt 18822 if !(int32(c) == 0) { 18823 break 18824 } 18825 v.reset(OpCopy) 18826 v.Type = a.Type 18827 v.AddArg(a) 18828 return true 18829 } 18830 // match: (MSUBW a x (MOVDconst [c])) 18831 // cond: int32(c)==1 18832 // result: (SUB a x) 18833 for { 18834 _ = v.Args[2] 18835 a := v.Args[0] 18836 x := v.Args[1] 18837 v_2 := v.Args[2] 18838 if v_2.Op != OpARM64MOVDconst { 18839 break 18840 } 18841 c := v_2.AuxInt 18842 if !(int32(c) == 1) { 18843 break 18844 } 18845 v.reset(OpARM64SUB) 18846 v.AddArg(a) 18847 v.AddArg(x) 18848 return true 18849 } 18850 // match: (MSUBW a x (MOVDconst [c])) 18851 // cond: isPowerOfTwo(c) 18852 // result: (SUBshiftLL a x [log2(c)]) 18853 for { 18854 _ = v.Args[2] 18855 a := v.Args[0] 18856 x := v.Args[1] 18857 v_2 := v.Args[2] 18858 if v_2.Op != OpARM64MOVDconst { 18859 break 18860 } 18861 c := v_2.AuxInt 18862 if !(isPowerOfTwo(c)) { 18863 break 18864 } 18865 v.reset(OpARM64SUBshiftLL) 18866 v.AuxInt = log2(c) 18867 v.AddArg(a) 18868 v.AddArg(x) 18869 return true 18870 } 18871 // match: (MSUBW a x (MOVDconst [c])) 18872 // cond: isPowerOfTwo(c-1) && int32(c)>=3 18873 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18874 for { 18875 _ = v.Args[2] 18876 a := v.Args[0] 18877 x := v.Args[1] 18878 v_2 := v.Args[2] 18879 if v_2.Op != OpARM64MOVDconst { 18880 break 18881 } 18882 c := v_2.AuxInt 18883 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 18884 break 18885 } 18886 v.reset(OpARM64SUB) 18887 v.AddArg(a) 18888 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18889 v0.AuxInt = log2(c - 1) 18890 v0.AddArg(x) 18891 v0.AddArg(x) 18892 v.AddArg(v0) 18893 return true 18894 } 18895 // match: (MSUBW a x (MOVDconst [c])) 18896 // cond: isPowerOfTwo(c+1) && int32(c)>=7 18897 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18898 for { 18899 _ = v.Args[2] 18900 a := v.Args[0] 18901 x := v.Args[1] 18902 v_2 := v.Args[2] 18903 if v_2.Op != 
OpARM64MOVDconst { 18904 break 18905 } 18906 c := v_2.AuxInt 18907 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 18908 break 18909 } 18910 v.reset(OpARM64ADD) 18911 v.AddArg(a) 18912 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18913 v0.AuxInt = log2(c + 1) 18914 v0.AddArg(x) 18915 v0.AddArg(x) 18916 v.AddArg(v0) 18917 return true 18918 } 18919 // match: (MSUBW a x (MOVDconst [c])) 18920 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 18921 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18922 for { 18923 _ = v.Args[2] 18924 a := v.Args[0] 18925 x := v.Args[1] 18926 v_2 := v.Args[2] 18927 if v_2.Op != OpARM64MOVDconst { 18928 break 18929 } 18930 c := v_2.AuxInt 18931 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 18932 break 18933 } 18934 v.reset(OpARM64ADDshiftLL) 18935 v.AuxInt = log2(c / 3) 18936 v.AddArg(a) 18937 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18938 v0.AuxInt = 2 18939 v0.AddArg(x) 18940 v0.AddArg(x) 18941 v.AddArg(v0) 18942 return true 18943 } 18944 // match: (MSUBW a x (MOVDconst [c])) 18945 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 18946 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18947 for { 18948 _ = v.Args[2] 18949 a := v.Args[0] 18950 x := v.Args[1] 18951 v_2 := v.Args[2] 18952 if v_2.Op != OpARM64MOVDconst { 18953 break 18954 } 18955 c := v_2.AuxInt 18956 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 18957 break 18958 } 18959 v.reset(OpARM64SUBshiftLL) 18960 v.AuxInt = log2(c / 5) 18961 v.AddArg(a) 18962 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18963 v0.AuxInt = 2 18964 v0.AddArg(x) 18965 v0.AddArg(x) 18966 v.AddArg(v0) 18967 return true 18968 } 18969 // match: (MSUBW a x (MOVDconst [c])) 18970 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 18971 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18972 for { 18973 _ = v.Args[2] 18974 a := v.Args[0] 18975 x := v.Args[1] 18976 v_2 := v.Args[2] 18977 if v_2.Op != 
OpARM64MOVDconst { 18978 break 18979 } 18980 c := v_2.AuxInt 18981 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 18982 break 18983 } 18984 v.reset(OpARM64ADDshiftLL) 18985 v.AuxInt = log2(c / 7) 18986 v.AddArg(a) 18987 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18988 v0.AuxInt = 3 18989 v0.AddArg(x) 18990 v0.AddArg(x) 18991 v.AddArg(v0) 18992 return true 18993 } 18994 // match: (MSUBW a x (MOVDconst [c])) 18995 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 18996 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18997 for { 18998 _ = v.Args[2] 18999 a := v.Args[0] 19000 x := v.Args[1] 19001 v_2 := v.Args[2] 19002 if v_2.Op != OpARM64MOVDconst { 19003 break 19004 } 19005 c := v_2.AuxInt 19006 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 19007 break 19008 } 19009 v.reset(OpARM64SUBshiftLL) 19010 v.AuxInt = log2(c / 9) 19011 v.AddArg(a) 19012 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19013 v0.AuxInt = 3 19014 v0.AddArg(x) 19015 v0.AddArg(x) 19016 v.AddArg(v0) 19017 return true 19018 } 19019 return false 19020 } 19021 func rewriteValueARM64_OpARM64MSUBW_10(v *Value) bool { 19022 b := v.Block 19023 _ = b 19024 // match: (MSUBW a (MOVDconst [c]) x) 19025 // cond: int32(c)==-1 19026 // result: (ADD a x) 19027 for { 19028 _ = v.Args[2] 19029 a := v.Args[0] 19030 v_1 := v.Args[1] 19031 if v_1.Op != OpARM64MOVDconst { 19032 break 19033 } 19034 c := v_1.AuxInt 19035 x := v.Args[2] 19036 if !(int32(c) == -1) { 19037 break 19038 } 19039 v.reset(OpARM64ADD) 19040 v.AddArg(a) 19041 v.AddArg(x) 19042 return true 19043 } 19044 // match: (MSUBW a (MOVDconst [c]) _) 19045 // cond: int32(c)==0 19046 // result: a 19047 for { 19048 _ = v.Args[2] 19049 a := v.Args[0] 19050 v_1 := v.Args[1] 19051 if v_1.Op != OpARM64MOVDconst { 19052 break 19053 } 19054 c := v_1.AuxInt 19055 if !(int32(c) == 0) { 19056 break 19057 } 19058 v.reset(OpCopy) 19059 v.Type = a.Type 19060 v.AddArg(a) 19061 return true 19062 } 19063 // match: (MSUBW a 
(MOVDconst [c]) x) 19064 // cond: int32(c)==1 19065 // result: (SUB a x) 19066 for { 19067 _ = v.Args[2] 19068 a := v.Args[0] 19069 v_1 := v.Args[1] 19070 if v_1.Op != OpARM64MOVDconst { 19071 break 19072 } 19073 c := v_1.AuxInt 19074 x := v.Args[2] 19075 if !(int32(c) == 1) { 19076 break 19077 } 19078 v.reset(OpARM64SUB) 19079 v.AddArg(a) 19080 v.AddArg(x) 19081 return true 19082 } 19083 // match: (MSUBW a (MOVDconst [c]) x) 19084 // cond: isPowerOfTwo(c) 19085 // result: (SUBshiftLL a x [log2(c)]) 19086 for { 19087 _ = v.Args[2] 19088 a := v.Args[0] 19089 v_1 := v.Args[1] 19090 if v_1.Op != OpARM64MOVDconst { 19091 break 19092 } 19093 c := v_1.AuxInt 19094 x := v.Args[2] 19095 if !(isPowerOfTwo(c)) { 19096 break 19097 } 19098 v.reset(OpARM64SUBshiftLL) 19099 v.AuxInt = log2(c) 19100 v.AddArg(a) 19101 v.AddArg(x) 19102 return true 19103 } 19104 // match: (MSUBW a (MOVDconst [c]) x) 19105 // cond: isPowerOfTwo(c-1) && int32(c)>=3 19106 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 19107 for { 19108 _ = v.Args[2] 19109 a := v.Args[0] 19110 v_1 := v.Args[1] 19111 if v_1.Op != OpARM64MOVDconst { 19112 break 19113 } 19114 c := v_1.AuxInt 19115 x := v.Args[2] 19116 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 19117 break 19118 } 19119 v.reset(OpARM64SUB) 19120 v.AddArg(a) 19121 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19122 v0.AuxInt = log2(c - 1) 19123 v0.AddArg(x) 19124 v0.AddArg(x) 19125 v.AddArg(v0) 19126 return true 19127 } 19128 // match: (MSUBW a (MOVDconst [c]) x) 19129 // cond: isPowerOfTwo(c+1) && int32(c)>=7 19130 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 19131 for { 19132 _ = v.Args[2] 19133 a := v.Args[0] 19134 v_1 := v.Args[1] 19135 if v_1.Op != OpARM64MOVDconst { 19136 break 19137 } 19138 c := v_1.AuxInt 19139 x := v.Args[2] 19140 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 19141 break 19142 } 19143 v.reset(OpARM64ADD) 19144 v.AddArg(a) 19145 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 19146 v0.AuxInt = log2(c + 
1) 19147 v0.AddArg(x) 19148 v0.AddArg(x) 19149 v.AddArg(v0) 19150 return true 19151 } 19152 // match: (MSUBW a (MOVDconst [c]) x) 19153 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 19154 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 19155 for { 19156 _ = v.Args[2] 19157 a := v.Args[0] 19158 v_1 := v.Args[1] 19159 if v_1.Op != OpARM64MOVDconst { 19160 break 19161 } 19162 c := v_1.AuxInt 19163 x := v.Args[2] 19164 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 19165 break 19166 } 19167 v.reset(OpARM64ADDshiftLL) 19168 v.AuxInt = log2(c / 3) 19169 v.AddArg(a) 19170 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 19171 v0.AuxInt = 2 19172 v0.AddArg(x) 19173 v0.AddArg(x) 19174 v.AddArg(v0) 19175 return true 19176 } 19177 // match: (MSUBW a (MOVDconst [c]) x) 19178 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 19179 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 19180 for { 19181 _ = v.Args[2] 19182 a := v.Args[0] 19183 v_1 := v.Args[1] 19184 if v_1.Op != OpARM64MOVDconst { 19185 break 19186 } 19187 c := v_1.AuxInt 19188 x := v.Args[2] 19189 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 19190 break 19191 } 19192 v.reset(OpARM64SUBshiftLL) 19193 v.AuxInt = log2(c / 5) 19194 v.AddArg(a) 19195 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19196 v0.AuxInt = 2 19197 v0.AddArg(x) 19198 v0.AddArg(x) 19199 v.AddArg(v0) 19200 return true 19201 } 19202 // match: (MSUBW a (MOVDconst [c]) x) 19203 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 19204 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 19205 for { 19206 _ = v.Args[2] 19207 a := v.Args[0] 19208 v_1 := v.Args[1] 19209 if v_1.Op != OpARM64MOVDconst { 19210 break 19211 } 19212 c := v_1.AuxInt 19213 x := v.Args[2] 19214 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 19215 break 19216 } 19217 v.reset(OpARM64ADDshiftLL) 19218 v.AuxInt = log2(c / 7) 19219 v.AddArg(a) 19220 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, 
x.Type) 19221 v0.AuxInt = 3 19222 v0.AddArg(x) 19223 v0.AddArg(x) 19224 v.AddArg(v0) 19225 return true 19226 } 19227 // match: (MSUBW a (MOVDconst [c]) x) 19228 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 19229 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 19230 for { 19231 _ = v.Args[2] 19232 a := v.Args[0] 19233 v_1 := v.Args[1] 19234 if v_1.Op != OpARM64MOVDconst { 19235 break 19236 } 19237 c := v_1.AuxInt 19238 x := v.Args[2] 19239 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 19240 break 19241 } 19242 v.reset(OpARM64SUBshiftLL) 19243 v.AuxInt = log2(c / 9) 19244 v.AddArg(a) 19245 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19246 v0.AuxInt = 3 19247 v0.AddArg(x) 19248 v0.AddArg(x) 19249 v.AddArg(v0) 19250 return true 19251 } 19252 return false 19253 } 19254 func rewriteValueARM64_OpARM64MSUBW_20(v *Value) bool { 19255 b := v.Block 19256 _ = b 19257 // match: (MSUBW (MOVDconst [c]) x y) 19258 // cond: 19259 // result: (ADDconst [c] (MNEGW <x.Type> x y)) 19260 for { 19261 _ = v.Args[2] 19262 v_0 := v.Args[0] 19263 if v_0.Op != OpARM64MOVDconst { 19264 break 19265 } 19266 c := v_0.AuxInt 19267 x := v.Args[1] 19268 y := v.Args[2] 19269 v.reset(OpARM64ADDconst) 19270 v.AuxInt = c 19271 v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type) 19272 v0.AddArg(x) 19273 v0.AddArg(y) 19274 v.AddArg(v0) 19275 return true 19276 } 19277 // match: (MSUBW a (MOVDconst [c]) (MOVDconst [d])) 19278 // cond: 19279 // result: (SUBconst [int64(int32(c)*int32(d))] a) 19280 for { 19281 _ = v.Args[2] 19282 a := v.Args[0] 19283 v_1 := v.Args[1] 19284 if v_1.Op != OpARM64MOVDconst { 19285 break 19286 } 19287 c := v_1.AuxInt 19288 v_2 := v.Args[2] 19289 if v_2.Op != OpARM64MOVDconst { 19290 break 19291 } 19292 d := v_2.AuxInt 19293 v.reset(OpARM64SUBconst) 19294 v.AuxInt = int64(int32(c) * int32(d)) 19295 v.AddArg(a) 19296 return true 19297 } 19298 return false 19299 } 19300 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool { 19301 // match: (MUL (NEG 
x) y) 19302 // cond: 19303 // result: (MNEG x y) 19304 for { 19305 _ = v.Args[1] 19306 v_0 := v.Args[0] 19307 if v_0.Op != OpARM64NEG { 19308 break 19309 } 19310 x := v_0.Args[0] 19311 y := v.Args[1] 19312 v.reset(OpARM64MNEG) 19313 v.AddArg(x) 19314 v.AddArg(y) 19315 return true 19316 } 19317 // match: (MUL y (NEG x)) 19318 // cond: 19319 // result: (MNEG x y) 19320 for { 19321 _ = v.Args[1] 19322 y := v.Args[0] 19323 v_1 := v.Args[1] 19324 if v_1.Op != OpARM64NEG { 19325 break 19326 } 19327 x := v_1.Args[0] 19328 v.reset(OpARM64MNEG) 19329 v.AddArg(x) 19330 v.AddArg(y) 19331 return true 19332 } 19333 // match: (MUL x (MOVDconst [-1])) 19334 // cond: 19335 // result: (NEG x) 19336 for { 19337 _ = v.Args[1] 19338 x := v.Args[0] 19339 v_1 := v.Args[1] 19340 if v_1.Op != OpARM64MOVDconst { 19341 break 19342 } 19343 if v_1.AuxInt != -1 { 19344 break 19345 } 19346 v.reset(OpARM64NEG) 19347 v.AddArg(x) 19348 return true 19349 } 19350 // match: (MUL (MOVDconst [-1]) x) 19351 // cond: 19352 // result: (NEG x) 19353 for { 19354 _ = v.Args[1] 19355 v_0 := v.Args[0] 19356 if v_0.Op != OpARM64MOVDconst { 19357 break 19358 } 19359 if v_0.AuxInt != -1 { 19360 break 19361 } 19362 x := v.Args[1] 19363 v.reset(OpARM64NEG) 19364 v.AddArg(x) 19365 return true 19366 } 19367 // match: (MUL _ (MOVDconst [0])) 19368 // cond: 19369 // result: (MOVDconst [0]) 19370 for { 19371 _ = v.Args[1] 19372 v_1 := v.Args[1] 19373 if v_1.Op != OpARM64MOVDconst { 19374 break 19375 } 19376 if v_1.AuxInt != 0 { 19377 break 19378 } 19379 v.reset(OpARM64MOVDconst) 19380 v.AuxInt = 0 19381 return true 19382 } 19383 // match: (MUL (MOVDconst [0]) _) 19384 // cond: 19385 // result: (MOVDconst [0]) 19386 for { 19387 _ = v.Args[1] 19388 v_0 := v.Args[0] 19389 if v_0.Op != OpARM64MOVDconst { 19390 break 19391 } 19392 if v_0.AuxInt != 0 { 19393 break 19394 } 19395 v.reset(OpARM64MOVDconst) 19396 v.AuxInt = 0 19397 return true 19398 } 19399 // match: (MUL x (MOVDconst [1])) 19400 // cond: 19401 // result: x 19402 
for { 19403 _ = v.Args[1] 19404 x := v.Args[0] 19405 v_1 := v.Args[1] 19406 if v_1.Op != OpARM64MOVDconst { 19407 break 19408 } 19409 if v_1.AuxInt != 1 { 19410 break 19411 } 19412 v.reset(OpCopy) 19413 v.Type = x.Type 19414 v.AddArg(x) 19415 return true 19416 } 19417 // match: (MUL (MOVDconst [1]) x) 19418 // cond: 19419 // result: x 19420 for { 19421 _ = v.Args[1] 19422 v_0 := v.Args[0] 19423 if v_0.Op != OpARM64MOVDconst { 19424 break 19425 } 19426 if v_0.AuxInt != 1 { 19427 break 19428 } 19429 x := v.Args[1] 19430 v.reset(OpCopy) 19431 v.Type = x.Type 19432 v.AddArg(x) 19433 return true 19434 } 19435 // match: (MUL x (MOVDconst [c])) 19436 // cond: isPowerOfTwo(c) 19437 // result: (SLLconst [log2(c)] x) 19438 for { 19439 _ = v.Args[1] 19440 x := v.Args[0] 19441 v_1 := v.Args[1] 19442 if v_1.Op != OpARM64MOVDconst { 19443 break 19444 } 19445 c := v_1.AuxInt 19446 if !(isPowerOfTwo(c)) { 19447 break 19448 } 19449 v.reset(OpARM64SLLconst) 19450 v.AuxInt = log2(c) 19451 v.AddArg(x) 19452 return true 19453 } 19454 // match: (MUL (MOVDconst [c]) x) 19455 // cond: isPowerOfTwo(c) 19456 // result: (SLLconst [log2(c)] x) 19457 for { 19458 _ = v.Args[1] 19459 v_0 := v.Args[0] 19460 if v_0.Op != OpARM64MOVDconst { 19461 break 19462 } 19463 c := v_0.AuxInt 19464 x := v.Args[1] 19465 if !(isPowerOfTwo(c)) { 19466 break 19467 } 19468 v.reset(OpARM64SLLconst) 19469 v.AuxInt = log2(c) 19470 v.AddArg(x) 19471 return true 19472 } 19473 return false 19474 } 19475 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 19476 b := v.Block 19477 _ = b 19478 // match: (MUL x (MOVDconst [c])) 19479 // cond: isPowerOfTwo(c-1) && c >= 3 19480 // result: (ADDshiftLL x x [log2(c-1)]) 19481 for { 19482 _ = v.Args[1] 19483 x := v.Args[0] 19484 v_1 := v.Args[1] 19485 if v_1.Op != OpARM64MOVDconst { 19486 break 19487 } 19488 c := v_1.AuxInt 19489 if !(isPowerOfTwo(c-1) && c >= 3) { 19490 break 19491 } 19492 v.reset(OpARM64ADDshiftLL) 19493 v.AuxInt = log2(c - 1) 19494 v.AddArg(x) 19495 
v.AddArg(x) 19496 return true 19497 } 19498 // match: (MUL (MOVDconst [c]) x) 19499 // cond: isPowerOfTwo(c-1) && c >= 3 19500 // result: (ADDshiftLL x x [log2(c-1)]) 19501 for { 19502 _ = v.Args[1] 19503 v_0 := v.Args[0] 19504 if v_0.Op != OpARM64MOVDconst { 19505 break 19506 } 19507 c := v_0.AuxInt 19508 x := v.Args[1] 19509 if !(isPowerOfTwo(c-1) && c >= 3) { 19510 break 19511 } 19512 v.reset(OpARM64ADDshiftLL) 19513 v.AuxInt = log2(c - 1) 19514 v.AddArg(x) 19515 v.AddArg(x) 19516 return true 19517 } 19518 // match: (MUL x (MOVDconst [c])) 19519 // cond: isPowerOfTwo(c+1) && c >= 7 19520 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 19521 for { 19522 _ = v.Args[1] 19523 x := v.Args[0] 19524 v_1 := v.Args[1] 19525 if v_1.Op != OpARM64MOVDconst { 19526 break 19527 } 19528 c := v_1.AuxInt 19529 if !(isPowerOfTwo(c+1) && c >= 7) { 19530 break 19531 } 19532 v.reset(OpARM64ADDshiftLL) 19533 v.AuxInt = log2(c + 1) 19534 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19535 v0.AddArg(x) 19536 v.AddArg(v0) 19537 v.AddArg(x) 19538 return true 19539 } 19540 // match: (MUL (MOVDconst [c]) x) 19541 // cond: isPowerOfTwo(c+1) && c >= 7 19542 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 19543 for { 19544 _ = v.Args[1] 19545 v_0 := v.Args[0] 19546 if v_0.Op != OpARM64MOVDconst { 19547 break 19548 } 19549 c := v_0.AuxInt 19550 x := v.Args[1] 19551 if !(isPowerOfTwo(c+1) && c >= 7) { 19552 break 19553 } 19554 v.reset(OpARM64ADDshiftLL) 19555 v.AuxInt = log2(c + 1) 19556 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19557 v0.AddArg(x) 19558 v.AddArg(v0) 19559 v.AddArg(x) 19560 return true 19561 } 19562 // match: (MUL x (MOVDconst [c])) 19563 // cond: c%3 == 0 && isPowerOfTwo(c/3) 19564 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 19565 for { 19566 _ = v.Args[1] 19567 x := v.Args[0] 19568 v_1 := v.Args[1] 19569 if v_1.Op != OpARM64MOVDconst { 19570 break 19571 } 19572 c := v_1.AuxInt 19573 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 19574 break 19575 } 
19576 v.reset(OpARM64SLLconst) 19577 v.AuxInt = log2(c / 3) 19578 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19579 v0.AuxInt = 1 19580 v0.AddArg(x) 19581 v0.AddArg(x) 19582 v.AddArg(v0) 19583 return true 19584 } 19585 // match: (MUL (MOVDconst [c]) x) 19586 // cond: c%3 == 0 && isPowerOfTwo(c/3) 19587 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 19588 for { 19589 _ = v.Args[1] 19590 v_0 := v.Args[0] 19591 if v_0.Op != OpARM64MOVDconst { 19592 break 19593 } 19594 c := v_0.AuxInt 19595 x := v.Args[1] 19596 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 19597 break 19598 } 19599 v.reset(OpARM64SLLconst) 19600 v.AuxInt = log2(c / 3) 19601 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19602 v0.AuxInt = 1 19603 v0.AddArg(x) 19604 v0.AddArg(x) 19605 v.AddArg(v0) 19606 return true 19607 } 19608 // match: (MUL x (MOVDconst [c])) 19609 // cond: c%5 == 0 && isPowerOfTwo(c/5) 19610 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 19611 for { 19612 _ = v.Args[1] 19613 x := v.Args[0] 19614 v_1 := v.Args[1] 19615 if v_1.Op != OpARM64MOVDconst { 19616 break 19617 } 19618 c := v_1.AuxInt 19619 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 19620 break 19621 } 19622 v.reset(OpARM64SLLconst) 19623 v.AuxInt = log2(c / 5) 19624 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19625 v0.AuxInt = 2 19626 v0.AddArg(x) 19627 v0.AddArg(x) 19628 v.AddArg(v0) 19629 return true 19630 } 19631 // match: (MUL (MOVDconst [c]) x) 19632 // cond: c%5 == 0 && isPowerOfTwo(c/5) 19633 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 19634 for { 19635 _ = v.Args[1] 19636 v_0 := v.Args[0] 19637 if v_0.Op != OpARM64MOVDconst { 19638 break 19639 } 19640 c := v_0.AuxInt 19641 x := v.Args[1] 19642 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 19643 break 19644 } 19645 v.reset(OpARM64SLLconst) 19646 v.AuxInt = log2(c / 5) 19647 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19648 v0.AuxInt = 2 19649 v0.AddArg(x) 19650 v0.AddArg(x) 19651 v.AddArg(v0) 19652 return 
true 19653 } 19654 // match: (MUL x (MOVDconst [c])) 19655 // cond: c%7 == 0 && isPowerOfTwo(c/7) 19656 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 19657 for { 19658 _ = v.Args[1] 19659 x := v.Args[0] 19660 v_1 := v.Args[1] 19661 if v_1.Op != OpARM64MOVDconst { 19662 break 19663 } 19664 c := v_1.AuxInt 19665 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 19666 break 19667 } 19668 v.reset(OpARM64SLLconst) 19669 v.AuxInt = log2(c / 7) 19670 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19671 v0.AuxInt = 3 19672 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19673 v1.AddArg(x) 19674 v0.AddArg(v1) 19675 v0.AddArg(x) 19676 v.AddArg(v0) 19677 return true 19678 } 19679 // match: (MUL (MOVDconst [c]) x) 19680 // cond: c%7 == 0 && isPowerOfTwo(c/7) 19681 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 19682 for { 19683 _ = v.Args[1] 19684 v_0 := v.Args[0] 19685 if v_0.Op != OpARM64MOVDconst { 19686 break 19687 } 19688 c := v_0.AuxInt 19689 x := v.Args[1] 19690 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 19691 break 19692 } 19693 v.reset(OpARM64SLLconst) 19694 v.AuxInt = log2(c / 7) 19695 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19696 v0.AuxInt = 3 19697 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19698 v1.AddArg(x) 19699 v0.AddArg(v1) 19700 v0.AddArg(x) 19701 v.AddArg(v0) 19702 return true 19703 } 19704 return false 19705 } 19706 func rewriteValueARM64_OpARM64MUL_20(v *Value) bool { 19707 b := v.Block 19708 _ = b 19709 // match: (MUL x (MOVDconst [c])) 19710 // cond: c%9 == 0 && isPowerOfTwo(c/9) 19711 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 19712 for { 19713 _ = v.Args[1] 19714 x := v.Args[0] 19715 v_1 := v.Args[1] 19716 if v_1.Op != OpARM64MOVDconst { 19717 break 19718 } 19719 c := v_1.AuxInt 19720 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 19721 break 19722 } 19723 v.reset(OpARM64SLLconst) 19724 v.AuxInt = log2(c / 9) 19725 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19726 
v0.AuxInt = 3 19727 v0.AddArg(x) 19728 v0.AddArg(x) 19729 v.AddArg(v0) 19730 return true 19731 } 19732 // match: (MUL (MOVDconst [c]) x) 19733 // cond: c%9 == 0 && isPowerOfTwo(c/9) 19734 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 19735 for { 19736 _ = v.Args[1] 19737 v_0 := v.Args[0] 19738 if v_0.Op != OpARM64MOVDconst { 19739 break 19740 } 19741 c := v_0.AuxInt 19742 x := v.Args[1] 19743 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 19744 break 19745 } 19746 v.reset(OpARM64SLLconst) 19747 v.AuxInt = log2(c / 9) 19748 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19749 v0.AuxInt = 3 19750 v0.AddArg(x) 19751 v0.AddArg(x) 19752 v.AddArg(v0) 19753 return true 19754 } 19755 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 19756 // cond: 19757 // result: (MOVDconst [c*d]) 19758 for { 19759 _ = v.Args[1] 19760 v_0 := v.Args[0] 19761 if v_0.Op != OpARM64MOVDconst { 19762 break 19763 } 19764 c := v_0.AuxInt 19765 v_1 := v.Args[1] 19766 if v_1.Op != OpARM64MOVDconst { 19767 break 19768 } 19769 d := v_1.AuxInt 19770 v.reset(OpARM64MOVDconst) 19771 v.AuxInt = c * d 19772 return true 19773 } 19774 // match: (MUL (MOVDconst [d]) (MOVDconst [c])) 19775 // cond: 19776 // result: (MOVDconst [c*d]) 19777 for { 19778 _ = v.Args[1] 19779 v_0 := v.Args[0] 19780 if v_0.Op != OpARM64MOVDconst { 19781 break 19782 } 19783 d := v_0.AuxInt 19784 v_1 := v.Args[1] 19785 if v_1.Op != OpARM64MOVDconst { 19786 break 19787 } 19788 c := v_1.AuxInt 19789 v.reset(OpARM64MOVDconst) 19790 v.AuxInt = c * d 19791 return true 19792 } 19793 return false 19794 } 19795 func rewriteValueARM64_OpARM64MULW_0(v *Value) bool { 19796 // match: (MULW (NEG x) y) 19797 // cond: 19798 // result: (MNEGW x y) 19799 for { 19800 _ = v.Args[1] 19801 v_0 := v.Args[0] 19802 if v_0.Op != OpARM64NEG { 19803 break 19804 } 19805 x := v_0.Args[0] 19806 y := v.Args[1] 19807 v.reset(OpARM64MNEGW) 19808 v.AddArg(x) 19809 v.AddArg(y) 19810 return true 19811 } 19812 // match: (MULW y (NEG x)) 19813 // cond: 19814 
// result: (MNEGW x y) 19815 for { 19816 _ = v.Args[1] 19817 y := v.Args[0] 19818 v_1 := v.Args[1] 19819 if v_1.Op != OpARM64NEG { 19820 break 19821 } 19822 x := v_1.Args[0] 19823 v.reset(OpARM64MNEGW) 19824 v.AddArg(x) 19825 v.AddArg(y) 19826 return true 19827 } 19828 // match: (MULW x (MOVDconst [c])) 19829 // cond: int32(c)==-1 19830 // result: (NEG x) 19831 for { 19832 _ = v.Args[1] 19833 x := v.Args[0] 19834 v_1 := v.Args[1] 19835 if v_1.Op != OpARM64MOVDconst { 19836 break 19837 } 19838 c := v_1.AuxInt 19839 if !(int32(c) == -1) { 19840 break 19841 } 19842 v.reset(OpARM64NEG) 19843 v.AddArg(x) 19844 return true 19845 } 19846 // match: (MULW (MOVDconst [c]) x) 19847 // cond: int32(c)==-1 19848 // result: (NEG x) 19849 for { 19850 _ = v.Args[1] 19851 v_0 := v.Args[0] 19852 if v_0.Op != OpARM64MOVDconst { 19853 break 19854 } 19855 c := v_0.AuxInt 19856 x := v.Args[1] 19857 if !(int32(c) == -1) { 19858 break 19859 } 19860 v.reset(OpARM64NEG) 19861 v.AddArg(x) 19862 return true 19863 } 19864 // match: (MULW _ (MOVDconst [c])) 19865 // cond: int32(c)==0 19866 // result: (MOVDconst [0]) 19867 for { 19868 _ = v.Args[1] 19869 v_1 := v.Args[1] 19870 if v_1.Op != OpARM64MOVDconst { 19871 break 19872 } 19873 c := v_1.AuxInt 19874 if !(int32(c) == 0) { 19875 break 19876 } 19877 v.reset(OpARM64MOVDconst) 19878 v.AuxInt = 0 19879 return true 19880 } 19881 // match: (MULW (MOVDconst [c]) _) 19882 // cond: int32(c)==0 19883 // result: (MOVDconst [0]) 19884 for { 19885 _ = v.Args[1] 19886 v_0 := v.Args[0] 19887 if v_0.Op != OpARM64MOVDconst { 19888 break 19889 } 19890 c := v_0.AuxInt 19891 if !(int32(c) == 0) { 19892 break 19893 } 19894 v.reset(OpARM64MOVDconst) 19895 v.AuxInt = 0 19896 return true 19897 } 19898 // match: (MULW x (MOVDconst [c])) 19899 // cond: int32(c)==1 19900 // result: x 19901 for { 19902 _ = v.Args[1] 19903 x := v.Args[0] 19904 v_1 := v.Args[1] 19905 if v_1.Op != OpARM64MOVDconst { 19906 break 19907 } 19908 c := v_1.AuxInt 19909 if !(int32(c) == 1) { 
19910 break 19911 } 19912 v.reset(OpCopy) 19913 v.Type = x.Type 19914 v.AddArg(x) 19915 return true 19916 } 19917 // match: (MULW (MOVDconst [c]) x) 19918 // cond: int32(c)==1 19919 // result: x 19920 for { 19921 _ = v.Args[1] 19922 v_0 := v.Args[0] 19923 if v_0.Op != OpARM64MOVDconst { 19924 break 19925 } 19926 c := v_0.AuxInt 19927 x := v.Args[1] 19928 if !(int32(c) == 1) { 19929 break 19930 } 19931 v.reset(OpCopy) 19932 v.Type = x.Type 19933 v.AddArg(x) 19934 return true 19935 } 19936 // match: (MULW x (MOVDconst [c])) 19937 // cond: isPowerOfTwo(c) 19938 // result: (SLLconst [log2(c)] x) 19939 for { 19940 _ = v.Args[1] 19941 x := v.Args[0] 19942 v_1 := v.Args[1] 19943 if v_1.Op != OpARM64MOVDconst { 19944 break 19945 } 19946 c := v_1.AuxInt 19947 if !(isPowerOfTwo(c)) { 19948 break 19949 } 19950 v.reset(OpARM64SLLconst) 19951 v.AuxInt = log2(c) 19952 v.AddArg(x) 19953 return true 19954 } 19955 // match: (MULW (MOVDconst [c]) x) 19956 // cond: isPowerOfTwo(c) 19957 // result: (SLLconst [log2(c)] x) 19958 for { 19959 _ = v.Args[1] 19960 v_0 := v.Args[0] 19961 if v_0.Op != OpARM64MOVDconst { 19962 break 19963 } 19964 c := v_0.AuxInt 19965 x := v.Args[1] 19966 if !(isPowerOfTwo(c)) { 19967 break 19968 } 19969 v.reset(OpARM64SLLconst) 19970 v.AuxInt = log2(c) 19971 v.AddArg(x) 19972 return true 19973 } 19974 return false 19975 } 19976 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool { 19977 b := v.Block 19978 _ = b 19979 // match: (MULW x (MOVDconst [c])) 19980 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 19981 // result: (ADDshiftLL x x [log2(c-1)]) 19982 for { 19983 _ = v.Args[1] 19984 x := v.Args[0] 19985 v_1 := v.Args[1] 19986 if v_1.Op != OpARM64MOVDconst { 19987 break 19988 } 19989 c := v_1.AuxInt 19990 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 19991 break 19992 } 19993 v.reset(OpARM64ADDshiftLL) 19994 v.AuxInt = log2(c - 1) 19995 v.AddArg(x) 19996 v.AddArg(x) 19997 return true 19998 } 19999 // match: (MULW (MOVDconst [c]) x) 20000 // cond: 
isPowerOfTwo(c-1) && int32(c) >= 3 20001 // result: (ADDshiftLL x x [log2(c-1)]) 20002 for { 20003 _ = v.Args[1] 20004 v_0 := v.Args[0] 20005 if v_0.Op != OpARM64MOVDconst { 20006 break 20007 } 20008 c := v_0.AuxInt 20009 x := v.Args[1] 20010 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 20011 break 20012 } 20013 v.reset(OpARM64ADDshiftLL) 20014 v.AuxInt = log2(c - 1) 20015 v.AddArg(x) 20016 v.AddArg(x) 20017 return true 20018 } 20019 // match: (MULW x (MOVDconst [c])) 20020 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 20021 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 20022 for { 20023 _ = v.Args[1] 20024 x := v.Args[0] 20025 v_1 := v.Args[1] 20026 if v_1.Op != OpARM64MOVDconst { 20027 break 20028 } 20029 c := v_1.AuxInt 20030 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 20031 break 20032 } 20033 v.reset(OpARM64ADDshiftLL) 20034 v.AuxInt = log2(c + 1) 20035 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20036 v0.AddArg(x) 20037 v.AddArg(v0) 20038 v.AddArg(x) 20039 return true 20040 } 20041 // match: (MULW (MOVDconst [c]) x) 20042 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 20043 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 20044 for { 20045 _ = v.Args[1] 20046 v_0 := v.Args[0] 20047 if v_0.Op != OpARM64MOVDconst { 20048 break 20049 } 20050 c := v_0.AuxInt 20051 x := v.Args[1] 20052 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 20053 break 20054 } 20055 v.reset(OpARM64ADDshiftLL) 20056 v.AuxInt = log2(c + 1) 20057 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20058 v0.AddArg(x) 20059 v.AddArg(v0) 20060 v.AddArg(x) 20061 return true 20062 } 20063 // match: (MULW x (MOVDconst [c])) 20064 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 20065 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 20066 for { 20067 _ = v.Args[1] 20068 x := v.Args[0] 20069 v_1 := v.Args[1] 20070 if v_1.Op != OpARM64MOVDconst { 20071 break 20072 } 20073 c := v_1.AuxInt 20074 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 20075 break 20076 } 20077 
v.reset(OpARM64SLLconst) 20078 v.AuxInt = log2(c / 3) 20079 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20080 v0.AuxInt = 1 20081 v0.AddArg(x) 20082 v0.AddArg(x) 20083 v.AddArg(v0) 20084 return true 20085 } 20086 // match: (MULW (MOVDconst [c]) x) 20087 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 20088 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 20089 for { 20090 _ = v.Args[1] 20091 v_0 := v.Args[0] 20092 if v_0.Op != OpARM64MOVDconst { 20093 break 20094 } 20095 c := v_0.AuxInt 20096 x := v.Args[1] 20097 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 20098 break 20099 } 20100 v.reset(OpARM64SLLconst) 20101 v.AuxInt = log2(c / 3) 20102 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20103 v0.AuxInt = 1 20104 v0.AddArg(x) 20105 v0.AddArg(x) 20106 v.AddArg(v0) 20107 return true 20108 } 20109 // match: (MULW x (MOVDconst [c])) 20110 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 20111 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 20112 for { 20113 _ = v.Args[1] 20114 x := v.Args[0] 20115 v_1 := v.Args[1] 20116 if v_1.Op != OpARM64MOVDconst { 20117 break 20118 } 20119 c := v_1.AuxInt 20120 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 20121 break 20122 } 20123 v.reset(OpARM64SLLconst) 20124 v.AuxInt = log2(c / 5) 20125 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20126 v0.AuxInt = 2 20127 v0.AddArg(x) 20128 v0.AddArg(x) 20129 v.AddArg(v0) 20130 return true 20131 } 20132 // match: (MULW (MOVDconst [c]) x) 20133 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 20134 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 20135 for { 20136 _ = v.Args[1] 20137 v_0 := v.Args[0] 20138 if v_0.Op != OpARM64MOVDconst { 20139 break 20140 } 20141 c := v_0.AuxInt 20142 x := v.Args[1] 20143 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 20144 break 20145 } 20146 v.reset(OpARM64SLLconst) 20147 v.AuxInt = log2(c / 5) 20148 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20149 
v0.AuxInt = 2 20150 v0.AddArg(x) 20151 v0.AddArg(x) 20152 v.AddArg(v0) 20153 return true 20154 } 20155 // match: (MULW x (MOVDconst [c])) 20156 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 20157 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 20158 for { 20159 _ = v.Args[1] 20160 x := v.Args[0] 20161 v_1 := v.Args[1] 20162 if v_1.Op != OpARM64MOVDconst { 20163 break 20164 } 20165 c := v_1.AuxInt 20166 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 20167 break 20168 } 20169 v.reset(OpARM64SLLconst) 20170 v.AuxInt = log2(c / 7) 20171 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20172 v0.AuxInt = 3 20173 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20174 v1.AddArg(x) 20175 v0.AddArg(v1) 20176 v0.AddArg(x) 20177 v.AddArg(v0) 20178 return true 20179 } 20180 // match: (MULW (MOVDconst [c]) x) 20181 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 20182 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 20183 for { 20184 _ = v.Args[1] 20185 v_0 := v.Args[0] 20186 if v_0.Op != OpARM64MOVDconst { 20187 break 20188 } 20189 c := v_0.AuxInt 20190 x := v.Args[1] 20191 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 20192 break 20193 } 20194 v.reset(OpARM64SLLconst) 20195 v.AuxInt = log2(c / 7) 20196 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20197 v0.AuxInt = 3 20198 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20199 v1.AddArg(x) 20200 v0.AddArg(v1) 20201 v0.AddArg(x) 20202 v.AddArg(v0) 20203 return true 20204 } 20205 return false 20206 } 20207 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool { 20208 b := v.Block 20209 _ = b 20210 // match: (MULW x (MOVDconst [c])) 20211 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 20212 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 20213 for { 20214 _ = v.Args[1] 20215 x := v.Args[0] 20216 v_1 := v.Args[1] 20217 if v_1.Op != OpARM64MOVDconst { 20218 break 20219 } 20220 c := v_1.AuxInt 20221 if !(c%9 == 0 && 
isPowerOfTwo(c/9) && is32Bit(c)) { 20222 break 20223 } 20224 v.reset(OpARM64SLLconst) 20225 v.AuxInt = log2(c / 9) 20226 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20227 v0.AuxInt = 3 20228 v0.AddArg(x) 20229 v0.AddArg(x) 20230 v.AddArg(v0) 20231 return true 20232 } 20233 // match: (MULW (MOVDconst [c]) x) 20234 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 20235 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 20236 for { 20237 _ = v.Args[1] 20238 v_0 := v.Args[0] 20239 if v_0.Op != OpARM64MOVDconst { 20240 break 20241 } 20242 c := v_0.AuxInt 20243 x := v.Args[1] 20244 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 20245 break 20246 } 20247 v.reset(OpARM64SLLconst) 20248 v.AuxInt = log2(c / 9) 20249 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20250 v0.AuxInt = 3 20251 v0.AddArg(x) 20252 v0.AddArg(x) 20253 v.AddArg(v0) 20254 return true 20255 } 20256 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 20257 // cond: 20258 // result: (MOVDconst [int64(int32(c)*int32(d))]) 20259 for { 20260 _ = v.Args[1] 20261 v_0 := v.Args[0] 20262 if v_0.Op != OpARM64MOVDconst { 20263 break 20264 } 20265 c := v_0.AuxInt 20266 v_1 := v.Args[1] 20267 if v_1.Op != OpARM64MOVDconst { 20268 break 20269 } 20270 d := v_1.AuxInt 20271 v.reset(OpARM64MOVDconst) 20272 v.AuxInt = int64(int32(c) * int32(d)) 20273 return true 20274 } 20275 // match: (MULW (MOVDconst [d]) (MOVDconst [c])) 20276 // cond: 20277 // result: (MOVDconst [int64(int32(c)*int32(d))]) 20278 for { 20279 _ = v.Args[1] 20280 v_0 := v.Args[0] 20281 if v_0.Op != OpARM64MOVDconst { 20282 break 20283 } 20284 d := v_0.AuxInt 20285 v_1 := v.Args[1] 20286 if v_1.Op != OpARM64MOVDconst { 20287 break 20288 } 20289 c := v_1.AuxInt 20290 v.reset(OpARM64MOVDconst) 20291 v.AuxInt = int64(int32(c) * int32(d)) 20292 return true 20293 } 20294 return false 20295 } 20296 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool { 20297 // match: (MVN (MOVDconst [c])) 20298 // cond: 20299 // result: (MOVDconst 
[^c]) 20300 for { 20301 v_0 := v.Args[0] 20302 if v_0.Op != OpARM64MOVDconst { 20303 break 20304 } 20305 c := v_0.AuxInt 20306 v.reset(OpARM64MOVDconst) 20307 v.AuxInt = ^c 20308 return true 20309 } 20310 // match: (MVN x:(SLLconst [c] y)) 20311 // cond: clobberIfDead(x) 20312 // result: (MVNshiftLL [c] y) 20313 for { 20314 x := v.Args[0] 20315 if x.Op != OpARM64SLLconst { 20316 break 20317 } 20318 c := x.AuxInt 20319 y := x.Args[0] 20320 if !(clobberIfDead(x)) { 20321 break 20322 } 20323 v.reset(OpARM64MVNshiftLL) 20324 v.AuxInt = c 20325 v.AddArg(y) 20326 return true 20327 } 20328 // match: (MVN x:(SRLconst [c] y)) 20329 // cond: clobberIfDead(x) 20330 // result: (MVNshiftRL [c] y) 20331 for { 20332 x := v.Args[0] 20333 if x.Op != OpARM64SRLconst { 20334 break 20335 } 20336 c := x.AuxInt 20337 y := x.Args[0] 20338 if !(clobberIfDead(x)) { 20339 break 20340 } 20341 v.reset(OpARM64MVNshiftRL) 20342 v.AuxInt = c 20343 v.AddArg(y) 20344 return true 20345 } 20346 // match: (MVN x:(SRAconst [c] y)) 20347 // cond: clobberIfDead(x) 20348 // result: (MVNshiftRA [c] y) 20349 for { 20350 x := v.Args[0] 20351 if x.Op != OpARM64SRAconst { 20352 break 20353 } 20354 c := x.AuxInt 20355 y := x.Args[0] 20356 if !(clobberIfDead(x)) { 20357 break 20358 } 20359 v.reset(OpARM64MVNshiftRA) 20360 v.AuxInt = c 20361 v.AddArg(y) 20362 return true 20363 } 20364 return false 20365 } 20366 func rewriteValueARM64_OpARM64MVNshiftLL_0(v *Value) bool { 20367 // match: (MVNshiftLL (MOVDconst [c]) [d]) 20368 // cond: 20369 // result: (MOVDconst [^int64(uint64(c)<<uint64(d))]) 20370 for { 20371 d := v.AuxInt 20372 v_0 := v.Args[0] 20373 if v_0.Op != OpARM64MOVDconst { 20374 break 20375 } 20376 c := v_0.AuxInt 20377 v.reset(OpARM64MOVDconst) 20378 v.AuxInt = ^int64(uint64(c) << uint64(d)) 20379 return true 20380 } 20381 return false 20382 } 20383 func rewriteValueARM64_OpARM64MVNshiftRA_0(v *Value) bool { 20384 // match: (MVNshiftRA (MOVDconst [c]) [d]) 20385 // cond: 20386 // result: (MOVDconst 
[^(c>>uint64(d))]) 20387 for { 20388 d := v.AuxInt 20389 v_0 := v.Args[0] 20390 if v_0.Op != OpARM64MOVDconst { 20391 break 20392 } 20393 c := v_0.AuxInt 20394 v.reset(OpARM64MOVDconst) 20395 v.AuxInt = ^(c >> uint64(d)) 20396 return true 20397 } 20398 return false 20399 } 20400 func rewriteValueARM64_OpARM64MVNshiftRL_0(v *Value) bool { 20401 // match: (MVNshiftRL (MOVDconst [c]) [d]) 20402 // cond: 20403 // result: (MOVDconst [^int64(uint64(c)>>uint64(d))]) 20404 for { 20405 d := v.AuxInt 20406 v_0 := v.Args[0] 20407 if v_0.Op != OpARM64MOVDconst { 20408 break 20409 } 20410 c := v_0.AuxInt 20411 v.reset(OpARM64MOVDconst) 20412 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 20413 return true 20414 } 20415 return false 20416 } 20417 func rewriteValueARM64_OpARM64NEG_0(v *Value) bool { 20418 // match: (NEG (MUL x y)) 20419 // cond: 20420 // result: (MNEG x y) 20421 for { 20422 v_0 := v.Args[0] 20423 if v_0.Op != OpARM64MUL { 20424 break 20425 } 20426 _ = v_0.Args[1] 20427 x := v_0.Args[0] 20428 y := v_0.Args[1] 20429 v.reset(OpARM64MNEG) 20430 v.AddArg(x) 20431 v.AddArg(y) 20432 return true 20433 } 20434 // match: (NEG (MULW x y)) 20435 // cond: 20436 // result: (MNEGW x y) 20437 for { 20438 v_0 := v.Args[0] 20439 if v_0.Op != OpARM64MULW { 20440 break 20441 } 20442 _ = v_0.Args[1] 20443 x := v_0.Args[0] 20444 y := v_0.Args[1] 20445 v.reset(OpARM64MNEGW) 20446 v.AddArg(x) 20447 v.AddArg(y) 20448 return true 20449 } 20450 // match: (NEG (MOVDconst [c])) 20451 // cond: 20452 // result: (MOVDconst [-c]) 20453 for { 20454 v_0 := v.Args[0] 20455 if v_0.Op != OpARM64MOVDconst { 20456 break 20457 } 20458 c := v_0.AuxInt 20459 v.reset(OpARM64MOVDconst) 20460 v.AuxInt = -c 20461 return true 20462 } 20463 // match: (NEG x:(SLLconst [c] y)) 20464 // cond: clobberIfDead(x) 20465 // result: (NEGshiftLL [c] y) 20466 for { 20467 x := v.Args[0] 20468 if x.Op != OpARM64SLLconst { 20469 break 20470 } 20471 c := x.AuxInt 20472 y := x.Args[0] 20473 if !(clobberIfDead(x)) { 20474 break 20475 
} 20476 v.reset(OpARM64NEGshiftLL) 20477 v.AuxInt = c 20478 v.AddArg(y) 20479 return true 20480 } 20481 // match: (NEG x:(SRLconst [c] y)) 20482 // cond: clobberIfDead(x) 20483 // result: (NEGshiftRL [c] y) 20484 for { 20485 x := v.Args[0] 20486 if x.Op != OpARM64SRLconst { 20487 break 20488 } 20489 c := x.AuxInt 20490 y := x.Args[0] 20491 if !(clobberIfDead(x)) { 20492 break 20493 } 20494 v.reset(OpARM64NEGshiftRL) 20495 v.AuxInt = c 20496 v.AddArg(y) 20497 return true 20498 } 20499 // match: (NEG x:(SRAconst [c] y)) 20500 // cond: clobberIfDead(x) 20501 // result: (NEGshiftRA [c] y) 20502 for { 20503 x := v.Args[0] 20504 if x.Op != OpARM64SRAconst { 20505 break 20506 } 20507 c := x.AuxInt 20508 y := x.Args[0] 20509 if !(clobberIfDead(x)) { 20510 break 20511 } 20512 v.reset(OpARM64NEGshiftRA) 20513 v.AuxInt = c 20514 v.AddArg(y) 20515 return true 20516 } 20517 return false 20518 } 20519 func rewriteValueARM64_OpARM64NEGshiftLL_0(v *Value) bool { 20520 // match: (NEGshiftLL (MOVDconst [c]) [d]) 20521 // cond: 20522 // result: (MOVDconst [-int64(uint64(c)<<uint64(d))]) 20523 for { 20524 d := v.AuxInt 20525 v_0 := v.Args[0] 20526 if v_0.Op != OpARM64MOVDconst { 20527 break 20528 } 20529 c := v_0.AuxInt 20530 v.reset(OpARM64MOVDconst) 20531 v.AuxInt = -int64(uint64(c) << uint64(d)) 20532 return true 20533 } 20534 return false 20535 } 20536 func rewriteValueARM64_OpARM64NEGshiftRA_0(v *Value) bool { 20537 // match: (NEGshiftRA (MOVDconst [c]) [d]) 20538 // cond: 20539 // result: (MOVDconst [-(c>>uint64(d))]) 20540 for { 20541 d := v.AuxInt 20542 v_0 := v.Args[0] 20543 if v_0.Op != OpARM64MOVDconst { 20544 break 20545 } 20546 c := v_0.AuxInt 20547 v.reset(OpARM64MOVDconst) 20548 v.AuxInt = -(c >> uint64(d)) 20549 return true 20550 } 20551 return false 20552 } 20553 func rewriteValueARM64_OpARM64NEGshiftRL_0(v *Value) bool { 20554 // match: (NEGshiftRL (MOVDconst [c]) [d]) 20555 // cond: 20556 // result: (MOVDconst [-int64(uint64(c)>>uint64(d))]) 20557 for { 20558 d := 
v.AuxInt 20559 v_0 := v.Args[0] 20560 if v_0.Op != OpARM64MOVDconst { 20561 break 20562 } 20563 c := v_0.AuxInt 20564 v.reset(OpARM64MOVDconst) 20565 v.AuxInt = -int64(uint64(c) >> uint64(d)) 20566 return true 20567 } 20568 return false 20569 } 20570 func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool { 20571 // match: (NotEqual (FlagEQ)) 20572 // cond: 20573 // result: (MOVDconst [0]) 20574 for { 20575 v_0 := v.Args[0] 20576 if v_0.Op != OpARM64FlagEQ { 20577 break 20578 } 20579 v.reset(OpARM64MOVDconst) 20580 v.AuxInt = 0 20581 return true 20582 } 20583 // match: (NotEqual (FlagLT_ULT)) 20584 // cond: 20585 // result: (MOVDconst [1]) 20586 for { 20587 v_0 := v.Args[0] 20588 if v_0.Op != OpARM64FlagLT_ULT { 20589 break 20590 } 20591 v.reset(OpARM64MOVDconst) 20592 v.AuxInt = 1 20593 return true 20594 } 20595 // match: (NotEqual (FlagLT_UGT)) 20596 // cond: 20597 // result: (MOVDconst [1]) 20598 for { 20599 v_0 := v.Args[0] 20600 if v_0.Op != OpARM64FlagLT_UGT { 20601 break 20602 } 20603 v.reset(OpARM64MOVDconst) 20604 v.AuxInt = 1 20605 return true 20606 } 20607 // match: (NotEqual (FlagGT_ULT)) 20608 // cond: 20609 // result: (MOVDconst [1]) 20610 for { 20611 v_0 := v.Args[0] 20612 if v_0.Op != OpARM64FlagGT_ULT { 20613 break 20614 } 20615 v.reset(OpARM64MOVDconst) 20616 v.AuxInt = 1 20617 return true 20618 } 20619 // match: (NotEqual (FlagGT_UGT)) 20620 // cond: 20621 // result: (MOVDconst [1]) 20622 for { 20623 v_0 := v.Args[0] 20624 if v_0.Op != OpARM64FlagGT_UGT { 20625 break 20626 } 20627 v.reset(OpARM64MOVDconst) 20628 v.AuxInt = 1 20629 return true 20630 } 20631 // match: (NotEqual (InvertFlags x)) 20632 // cond: 20633 // result: (NotEqual x) 20634 for { 20635 v_0 := v.Args[0] 20636 if v_0.Op != OpARM64InvertFlags { 20637 break 20638 } 20639 x := v_0.Args[0] 20640 v.reset(OpARM64NotEqual) 20641 v.AddArg(x) 20642 return true 20643 } 20644 return false 20645 } 20646 func rewriteValueARM64_OpARM64OR_0(v *Value) bool { 20647 // match: (OR x (MOVDconst 
[c])) 20648 // cond: 20649 // result: (ORconst [c] x) 20650 for { 20651 _ = v.Args[1] 20652 x := v.Args[0] 20653 v_1 := v.Args[1] 20654 if v_1.Op != OpARM64MOVDconst { 20655 break 20656 } 20657 c := v_1.AuxInt 20658 v.reset(OpARM64ORconst) 20659 v.AuxInt = c 20660 v.AddArg(x) 20661 return true 20662 } 20663 // match: (OR (MOVDconst [c]) x) 20664 // cond: 20665 // result: (ORconst [c] x) 20666 for { 20667 _ = v.Args[1] 20668 v_0 := v.Args[0] 20669 if v_0.Op != OpARM64MOVDconst { 20670 break 20671 } 20672 c := v_0.AuxInt 20673 x := v.Args[1] 20674 v.reset(OpARM64ORconst) 20675 v.AuxInt = c 20676 v.AddArg(x) 20677 return true 20678 } 20679 // match: (OR x x) 20680 // cond: 20681 // result: x 20682 for { 20683 _ = v.Args[1] 20684 x := v.Args[0] 20685 if x != v.Args[1] { 20686 break 20687 } 20688 v.reset(OpCopy) 20689 v.Type = x.Type 20690 v.AddArg(x) 20691 return true 20692 } 20693 // match: (OR x (MVN y)) 20694 // cond: 20695 // result: (ORN x y) 20696 for { 20697 _ = v.Args[1] 20698 x := v.Args[0] 20699 v_1 := v.Args[1] 20700 if v_1.Op != OpARM64MVN { 20701 break 20702 } 20703 y := v_1.Args[0] 20704 v.reset(OpARM64ORN) 20705 v.AddArg(x) 20706 v.AddArg(y) 20707 return true 20708 } 20709 // match: (OR (MVN y) x) 20710 // cond: 20711 // result: (ORN x y) 20712 for { 20713 _ = v.Args[1] 20714 v_0 := v.Args[0] 20715 if v_0.Op != OpARM64MVN { 20716 break 20717 } 20718 y := v_0.Args[0] 20719 x := v.Args[1] 20720 v.reset(OpARM64ORN) 20721 v.AddArg(x) 20722 v.AddArg(y) 20723 return true 20724 } 20725 // match: (OR x0 x1:(SLLconst [c] y)) 20726 // cond: clobberIfDead(x1) 20727 // result: (ORshiftLL x0 y [c]) 20728 for { 20729 _ = v.Args[1] 20730 x0 := v.Args[0] 20731 x1 := v.Args[1] 20732 if x1.Op != OpARM64SLLconst { 20733 break 20734 } 20735 c := x1.AuxInt 20736 y := x1.Args[0] 20737 if !(clobberIfDead(x1)) { 20738 break 20739 } 20740 v.reset(OpARM64ORshiftLL) 20741 v.AuxInt = c 20742 v.AddArg(x0) 20743 v.AddArg(y) 20744 return true 20745 } 20746 // match: (OR x1:(SLLconst 
[c] y) x0) 20747 // cond: clobberIfDead(x1) 20748 // result: (ORshiftLL x0 y [c]) 20749 for { 20750 _ = v.Args[1] 20751 x1 := v.Args[0] 20752 if x1.Op != OpARM64SLLconst { 20753 break 20754 } 20755 c := x1.AuxInt 20756 y := x1.Args[0] 20757 x0 := v.Args[1] 20758 if !(clobberIfDead(x1)) { 20759 break 20760 } 20761 v.reset(OpARM64ORshiftLL) 20762 v.AuxInt = c 20763 v.AddArg(x0) 20764 v.AddArg(y) 20765 return true 20766 } 20767 // match: (OR x0 x1:(SRLconst [c] y)) 20768 // cond: clobberIfDead(x1) 20769 // result: (ORshiftRL x0 y [c]) 20770 for { 20771 _ = v.Args[1] 20772 x0 := v.Args[0] 20773 x1 := v.Args[1] 20774 if x1.Op != OpARM64SRLconst { 20775 break 20776 } 20777 c := x1.AuxInt 20778 y := x1.Args[0] 20779 if !(clobberIfDead(x1)) { 20780 break 20781 } 20782 v.reset(OpARM64ORshiftRL) 20783 v.AuxInt = c 20784 v.AddArg(x0) 20785 v.AddArg(y) 20786 return true 20787 } 20788 // match: (OR x1:(SRLconst [c] y) x0) 20789 // cond: clobberIfDead(x1) 20790 // result: (ORshiftRL x0 y [c]) 20791 for { 20792 _ = v.Args[1] 20793 x1 := v.Args[0] 20794 if x1.Op != OpARM64SRLconst { 20795 break 20796 } 20797 c := x1.AuxInt 20798 y := x1.Args[0] 20799 x0 := v.Args[1] 20800 if !(clobberIfDead(x1)) { 20801 break 20802 } 20803 v.reset(OpARM64ORshiftRL) 20804 v.AuxInt = c 20805 v.AddArg(x0) 20806 v.AddArg(y) 20807 return true 20808 } 20809 // match: (OR x0 x1:(SRAconst [c] y)) 20810 // cond: clobberIfDead(x1) 20811 // result: (ORshiftRA x0 y [c]) 20812 for { 20813 _ = v.Args[1] 20814 x0 := v.Args[0] 20815 x1 := v.Args[1] 20816 if x1.Op != OpARM64SRAconst { 20817 break 20818 } 20819 c := x1.AuxInt 20820 y := x1.Args[0] 20821 if !(clobberIfDead(x1)) { 20822 break 20823 } 20824 v.reset(OpARM64ORshiftRA) 20825 v.AuxInt = c 20826 v.AddArg(x0) 20827 v.AddArg(y) 20828 return true 20829 } 20830 return false 20831 } 20832 func rewriteValueARM64_OpARM64OR_10(v *Value) bool { 20833 b := v.Block 20834 _ = b 20835 typ := &b.Func.Config.Types 20836 _ = typ 20837 // match: (OR x1:(SRAconst [c] y) x0) 
20838 // cond: clobberIfDead(x1) 20839 // result: (ORshiftRA x0 y [c]) 20840 for { 20841 _ = v.Args[1] 20842 x1 := v.Args[0] 20843 if x1.Op != OpARM64SRAconst { 20844 break 20845 } 20846 c := x1.AuxInt 20847 y := x1.Args[0] 20848 x0 := v.Args[1] 20849 if !(clobberIfDead(x1)) { 20850 break 20851 } 20852 v.reset(OpARM64ORshiftRA) 20853 v.AuxInt = c 20854 v.AddArg(x0) 20855 v.AddArg(y) 20856 return true 20857 } 20858 // match: (OR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 20859 // cond: cc.(Op) == OpARM64LessThanU 20860 // result: (ROR x (NEG <t> y)) 20861 for { 20862 _ = v.Args[1] 20863 v_0 := v.Args[0] 20864 if v_0.Op != OpARM64SLL { 20865 break 20866 } 20867 _ = v_0.Args[1] 20868 x := v_0.Args[0] 20869 v_0_1 := v_0.Args[1] 20870 if v_0_1.Op != OpARM64ANDconst { 20871 break 20872 } 20873 t := v_0_1.Type 20874 if v_0_1.AuxInt != 63 { 20875 break 20876 } 20877 y := v_0_1.Args[0] 20878 v_1 := v.Args[1] 20879 if v_1.Op != OpARM64CSEL0 { 20880 break 20881 } 20882 if v_1.Type != typ.UInt64 { 20883 break 20884 } 20885 cc := v_1.Aux 20886 _ = v_1.Args[1] 20887 v_1_0 := v_1.Args[0] 20888 if v_1_0.Op != OpARM64SRL { 20889 break 20890 } 20891 if v_1_0.Type != typ.UInt64 { 20892 break 20893 } 20894 _ = v_1_0.Args[1] 20895 if x != v_1_0.Args[0] { 20896 break 20897 } 20898 v_1_0_1 := v_1_0.Args[1] 20899 if v_1_0_1.Op != OpARM64SUB { 20900 break 20901 } 20902 if v_1_0_1.Type != t { 20903 break 20904 } 20905 _ = v_1_0_1.Args[1] 20906 v_1_0_1_0 := v_1_0_1.Args[0] 20907 if v_1_0_1_0.Op != OpARM64MOVDconst { 20908 break 20909 } 20910 if v_1_0_1_0.AuxInt != 64 { 20911 break 20912 } 20913 v_1_0_1_1 := v_1_0_1.Args[1] 20914 if v_1_0_1_1.Op != OpARM64ANDconst { 20915 break 20916 } 20917 if v_1_0_1_1.Type != t { 20918 break 20919 } 20920 if v_1_0_1_1.AuxInt != 63 { 20921 break 20922 } 20923 if y != v_1_0_1_1.Args[0] { 20924 break 20925 } 
20926 v_1_1 := v_1.Args[1] 20927 if v_1_1.Op != OpARM64CMPconst { 20928 break 20929 } 20930 if v_1_1.AuxInt != 64 { 20931 break 20932 } 20933 v_1_1_0 := v_1_1.Args[0] 20934 if v_1_1_0.Op != OpARM64SUB { 20935 break 20936 } 20937 if v_1_1_0.Type != t { 20938 break 20939 } 20940 _ = v_1_1_0.Args[1] 20941 v_1_1_0_0 := v_1_1_0.Args[0] 20942 if v_1_1_0_0.Op != OpARM64MOVDconst { 20943 break 20944 } 20945 if v_1_1_0_0.AuxInt != 64 { 20946 break 20947 } 20948 v_1_1_0_1 := v_1_1_0.Args[1] 20949 if v_1_1_0_1.Op != OpARM64ANDconst { 20950 break 20951 } 20952 if v_1_1_0_1.Type != t { 20953 break 20954 } 20955 if v_1_1_0_1.AuxInt != 63 { 20956 break 20957 } 20958 if y != v_1_1_0_1.Args[0] { 20959 break 20960 } 20961 if !(cc.(Op) == OpARM64LessThanU) { 20962 break 20963 } 20964 v.reset(OpARM64ROR) 20965 v.AddArg(x) 20966 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 20967 v0.AddArg(y) 20968 v.AddArg(v0) 20969 return true 20970 } 20971 // match: (OR (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 20972 // cond: cc.(Op) == OpARM64LessThanU 20973 // result: (ROR x (NEG <t> y)) 20974 for { 20975 _ = v.Args[1] 20976 v_0 := v.Args[0] 20977 if v_0.Op != OpARM64CSEL0 { 20978 break 20979 } 20980 if v_0.Type != typ.UInt64 { 20981 break 20982 } 20983 cc := v_0.Aux 20984 _ = v_0.Args[1] 20985 v_0_0 := v_0.Args[0] 20986 if v_0_0.Op != OpARM64SRL { 20987 break 20988 } 20989 if v_0_0.Type != typ.UInt64 { 20990 break 20991 } 20992 _ = v_0_0.Args[1] 20993 x := v_0_0.Args[0] 20994 v_0_0_1 := v_0_0.Args[1] 20995 if v_0_0_1.Op != OpARM64SUB { 20996 break 20997 } 20998 t := v_0_0_1.Type 20999 _ = v_0_0_1.Args[1] 21000 v_0_0_1_0 := v_0_0_1.Args[0] 21001 if v_0_0_1_0.Op != OpARM64MOVDconst { 21002 break 21003 } 21004 if v_0_0_1_0.AuxInt != 64 { 21005 break 21006 } 21007 v_0_0_1_1 := v_0_0_1.Args[1] 21008 if v_0_0_1_1.Op != OpARM64ANDconst { 21009 break 21010 } 
21011 if v_0_0_1_1.Type != t { 21012 break 21013 } 21014 if v_0_0_1_1.AuxInt != 63 { 21015 break 21016 } 21017 y := v_0_0_1_1.Args[0] 21018 v_0_1 := v_0.Args[1] 21019 if v_0_1.Op != OpARM64CMPconst { 21020 break 21021 } 21022 if v_0_1.AuxInt != 64 { 21023 break 21024 } 21025 v_0_1_0 := v_0_1.Args[0] 21026 if v_0_1_0.Op != OpARM64SUB { 21027 break 21028 } 21029 if v_0_1_0.Type != t { 21030 break 21031 } 21032 _ = v_0_1_0.Args[1] 21033 v_0_1_0_0 := v_0_1_0.Args[0] 21034 if v_0_1_0_0.Op != OpARM64MOVDconst { 21035 break 21036 } 21037 if v_0_1_0_0.AuxInt != 64 { 21038 break 21039 } 21040 v_0_1_0_1 := v_0_1_0.Args[1] 21041 if v_0_1_0_1.Op != OpARM64ANDconst { 21042 break 21043 } 21044 if v_0_1_0_1.Type != t { 21045 break 21046 } 21047 if v_0_1_0_1.AuxInt != 63 { 21048 break 21049 } 21050 if y != v_0_1_0_1.Args[0] { 21051 break 21052 } 21053 v_1 := v.Args[1] 21054 if v_1.Op != OpARM64SLL { 21055 break 21056 } 21057 _ = v_1.Args[1] 21058 if x != v_1.Args[0] { 21059 break 21060 } 21061 v_1_1 := v_1.Args[1] 21062 if v_1_1.Op != OpARM64ANDconst { 21063 break 21064 } 21065 if v_1_1.Type != t { 21066 break 21067 } 21068 if v_1_1.AuxInt != 63 { 21069 break 21070 } 21071 if y != v_1_1.Args[0] { 21072 break 21073 } 21074 if !(cc.(Op) == OpARM64LessThanU) { 21075 break 21076 } 21077 v.reset(OpARM64ROR) 21078 v.AddArg(x) 21079 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21080 v0.AddArg(y) 21081 v.AddArg(v0) 21082 return true 21083 } 21084 // match: (OR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 21085 // cond: cc.(Op) == OpARM64LessThanU 21086 // result: (ROR x y) 21087 for { 21088 _ = v.Args[1] 21089 v_0 := v.Args[0] 21090 if v_0.Op != OpARM64SRL { 21091 break 21092 } 21093 if v_0.Type != typ.UInt64 { 21094 break 21095 } 21096 _ = v_0.Args[1] 21097 x := v_0.Args[0] 21098 v_0_1 := v_0.Args[1] 21099 if v_0_1.Op != OpARM64ANDconst { 21100 
break 21101 } 21102 t := v_0_1.Type 21103 if v_0_1.AuxInt != 63 { 21104 break 21105 } 21106 y := v_0_1.Args[0] 21107 v_1 := v.Args[1] 21108 if v_1.Op != OpARM64CSEL0 { 21109 break 21110 } 21111 if v_1.Type != typ.UInt64 { 21112 break 21113 } 21114 cc := v_1.Aux 21115 _ = v_1.Args[1] 21116 v_1_0 := v_1.Args[0] 21117 if v_1_0.Op != OpARM64SLL { 21118 break 21119 } 21120 _ = v_1_0.Args[1] 21121 if x != v_1_0.Args[0] { 21122 break 21123 } 21124 v_1_0_1 := v_1_0.Args[1] 21125 if v_1_0_1.Op != OpARM64SUB { 21126 break 21127 } 21128 if v_1_0_1.Type != t { 21129 break 21130 } 21131 _ = v_1_0_1.Args[1] 21132 v_1_0_1_0 := v_1_0_1.Args[0] 21133 if v_1_0_1_0.Op != OpARM64MOVDconst { 21134 break 21135 } 21136 if v_1_0_1_0.AuxInt != 64 { 21137 break 21138 } 21139 v_1_0_1_1 := v_1_0_1.Args[1] 21140 if v_1_0_1_1.Op != OpARM64ANDconst { 21141 break 21142 } 21143 if v_1_0_1_1.Type != t { 21144 break 21145 } 21146 if v_1_0_1_1.AuxInt != 63 { 21147 break 21148 } 21149 if y != v_1_0_1_1.Args[0] { 21150 break 21151 } 21152 v_1_1 := v_1.Args[1] 21153 if v_1_1.Op != OpARM64CMPconst { 21154 break 21155 } 21156 if v_1_1.AuxInt != 64 { 21157 break 21158 } 21159 v_1_1_0 := v_1_1.Args[0] 21160 if v_1_1_0.Op != OpARM64SUB { 21161 break 21162 } 21163 if v_1_1_0.Type != t { 21164 break 21165 } 21166 _ = v_1_1_0.Args[1] 21167 v_1_1_0_0 := v_1_1_0.Args[0] 21168 if v_1_1_0_0.Op != OpARM64MOVDconst { 21169 break 21170 } 21171 if v_1_1_0_0.AuxInt != 64 { 21172 break 21173 } 21174 v_1_1_0_1 := v_1_1_0.Args[1] 21175 if v_1_1_0_1.Op != OpARM64ANDconst { 21176 break 21177 } 21178 if v_1_1_0_1.Type != t { 21179 break 21180 } 21181 if v_1_1_0_1.AuxInt != 63 { 21182 break 21183 } 21184 if y != v_1_1_0_1.Args[0] { 21185 break 21186 } 21187 if !(cc.(Op) == OpARM64LessThanU) { 21188 break 21189 } 21190 v.reset(OpARM64ROR) 21191 v.AddArg(x) 21192 v.AddArg(y) 21193 return true 21194 } 21195 // match: (OR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> 
(MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 21196 // cond: cc.(Op) == OpARM64LessThanU 21197 // result: (ROR x y) 21198 for { 21199 _ = v.Args[1] 21200 v_0 := v.Args[0] 21201 if v_0.Op != OpARM64CSEL0 { 21202 break 21203 } 21204 if v_0.Type != typ.UInt64 { 21205 break 21206 } 21207 cc := v_0.Aux 21208 _ = v_0.Args[1] 21209 v_0_0 := v_0.Args[0] 21210 if v_0_0.Op != OpARM64SLL { 21211 break 21212 } 21213 _ = v_0_0.Args[1] 21214 x := v_0_0.Args[0] 21215 v_0_0_1 := v_0_0.Args[1] 21216 if v_0_0_1.Op != OpARM64SUB { 21217 break 21218 } 21219 t := v_0_0_1.Type 21220 _ = v_0_0_1.Args[1] 21221 v_0_0_1_0 := v_0_0_1.Args[0] 21222 if v_0_0_1_0.Op != OpARM64MOVDconst { 21223 break 21224 } 21225 if v_0_0_1_0.AuxInt != 64 { 21226 break 21227 } 21228 v_0_0_1_1 := v_0_0_1.Args[1] 21229 if v_0_0_1_1.Op != OpARM64ANDconst { 21230 break 21231 } 21232 if v_0_0_1_1.Type != t { 21233 break 21234 } 21235 if v_0_0_1_1.AuxInt != 63 { 21236 break 21237 } 21238 y := v_0_0_1_1.Args[0] 21239 v_0_1 := v_0.Args[1] 21240 if v_0_1.Op != OpARM64CMPconst { 21241 break 21242 } 21243 if v_0_1.AuxInt != 64 { 21244 break 21245 } 21246 v_0_1_0 := v_0_1.Args[0] 21247 if v_0_1_0.Op != OpARM64SUB { 21248 break 21249 } 21250 if v_0_1_0.Type != t { 21251 break 21252 } 21253 _ = v_0_1_0.Args[1] 21254 v_0_1_0_0 := v_0_1_0.Args[0] 21255 if v_0_1_0_0.Op != OpARM64MOVDconst { 21256 break 21257 } 21258 if v_0_1_0_0.AuxInt != 64 { 21259 break 21260 } 21261 v_0_1_0_1 := v_0_1_0.Args[1] 21262 if v_0_1_0_1.Op != OpARM64ANDconst { 21263 break 21264 } 21265 if v_0_1_0_1.Type != t { 21266 break 21267 } 21268 if v_0_1_0_1.AuxInt != 63 { 21269 break 21270 } 21271 if y != v_0_1_0_1.Args[0] { 21272 break 21273 } 21274 v_1 := v.Args[1] 21275 if v_1.Op != OpARM64SRL { 21276 break 21277 } 21278 if v_1.Type != typ.UInt64 { 21279 break 21280 } 21281 _ = v_1.Args[1] 21282 if x != v_1.Args[0] { 21283 break 21284 } 21285 v_1_1 := v_1.Args[1] 21286 if v_1_1.Op != OpARM64ANDconst { 21287 break 
21288 } 21289 if v_1_1.Type != t { 21290 break 21291 } 21292 if v_1_1.AuxInt != 63 { 21293 break 21294 } 21295 if y != v_1_1.Args[0] { 21296 break 21297 } 21298 if !(cc.(Op) == OpARM64LessThanU) { 21299 break 21300 } 21301 v.reset(OpARM64ROR) 21302 v.AddArg(x) 21303 v.AddArg(y) 21304 return true 21305 } 21306 // match: (OR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 21307 // cond: cc.(Op) == OpARM64LessThanU 21308 // result: (RORW x (NEG <t> y)) 21309 for { 21310 _ = v.Args[1] 21311 v_0 := v.Args[0] 21312 if v_0.Op != OpARM64SLL { 21313 break 21314 } 21315 _ = v_0.Args[1] 21316 x := v_0.Args[0] 21317 v_0_1 := v_0.Args[1] 21318 if v_0_1.Op != OpARM64ANDconst { 21319 break 21320 } 21321 t := v_0_1.Type 21322 if v_0_1.AuxInt != 31 { 21323 break 21324 } 21325 y := v_0_1.Args[0] 21326 v_1 := v.Args[1] 21327 if v_1.Op != OpARM64CSEL0 { 21328 break 21329 } 21330 if v_1.Type != typ.UInt32 { 21331 break 21332 } 21333 cc := v_1.Aux 21334 _ = v_1.Args[1] 21335 v_1_0 := v_1.Args[0] 21336 if v_1_0.Op != OpARM64SRL { 21337 break 21338 } 21339 if v_1_0.Type != typ.UInt32 { 21340 break 21341 } 21342 _ = v_1_0.Args[1] 21343 v_1_0_0 := v_1_0.Args[0] 21344 if v_1_0_0.Op != OpARM64MOVWUreg { 21345 break 21346 } 21347 if x != v_1_0_0.Args[0] { 21348 break 21349 } 21350 v_1_0_1 := v_1_0.Args[1] 21351 if v_1_0_1.Op != OpARM64SUB { 21352 break 21353 } 21354 if v_1_0_1.Type != t { 21355 break 21356 } 21357 _ = v_1_0_1.Args[1] 21358 v_1_0_1_0 := v_1_0_1.Args[0] 21359 if v_1_0_1_0.Op != OpARM64MOVDconst { 21360 break 21361 } 21362 if v_1_0_1_0.AuxInt != 32 { 21363 break 21364 } 21365 v_1_0_1_1 := v_1_0_1.Args[1] 21366 if v_1_0_1_1.Op != OpARM64ANDconst { 21367 break 21368 } 21369 if v_1_0_1_1.Type != t { 21370 break 21371 } 21372 if v_1_0_1_1.AuxInt != 31 { 21373 break 21374 } 21375 if y != v_1_0_1_1.Args[0] { 21376 break 21377 } 21378 
v_1_1 := v_1.Args[1] 21379 if v_1_1.Op != OpARM64CMPconst { 21380 break 21381 } 21382 if v_1_1.AuxInt != 64 { 21383 break 21384 } 21385 v_1_1_0 := v_1_1.Args[0] 21386 if v_1_1_0.Op != OpARM64SUB { 21387 break 21388 } 21389 if v_1_1_0.Type != t { 21390 break 21391 } 21392 _ = v_1_1_0.Args[1] 21393 v_1_1_0_0 := v_1_1_0.Args[0] 21394 if v_1_1_0_0.Op != OpARM64MOVDconst { 21395 break 21396 } 21397 if v_1_1_0_0.AuxInt != 32 { 21398 break 21399 } 21400 v_1_1_0_1 := v_1_1_0.Args[1] 21401 if v_1_1_0_1.Op != OpARM64ANDconst { 21402 break 21403 } 21404 if v_1_1_0_1.Type != t { 21405 break 21406 } 21407 if v_1_1_0_1.AuxInt != 31 { 21408 break 21409 } 21410 if y != v_1_1_0_1.Args[0] { 21411 break 21412 } 21413 if !(cc.(Op) == OpARM64LessThanU) { 21414 break 21415 } 21416 v.reset(OpARM64RORW) 21417 v.AddArg(x) 21418 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21419 v0.AddArg(y) 21420 v.AddArg(v0) 21421 return true 21422 } 21423 // match: (OR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 21424 // cond: cc.(Op) == OpARM64LessThanU 21425 // result: (RORW x (NEG <t> y)) 21426 for { 21427 _ = v.Args[1] 21428 v_0 := v.Args[0] 21429 if v_0.Op != OpARM64CSEL0 { 21430 break 21431 } 21432 if v_0.Type != typ.UInt32 { 21433 break 21434 } 21435 cc := v_0.Aux 21436 _ = v_0.Args[1] 21437 v_0_0 := v_0.Args[0] 21438 if v_0_0.Op != OpARM64SRL { 21439 break 21440 } 21441 if v_0_0.Type != typ.UInt32 { 21442 break 21443 } 21444 _ = v_0_0.Args[1] 21445 v_0_0_0 := v_0_0.Args[0] 21446 if v_0_0_0.Op != OpARM64MOVWUreg { 21447 break 21448 } 21449 x := v_0_0_0.Args[0] 21450 v_0_0_1 := v_0_0.Args[1] 21451 if v_0_0_1.Op != OpARM64SUB { 21452 break 21453 } 21454 t := v_0_0_1.Type 21455 _ = v_0_0_1.Args[1] 21456 v_0_0_1_0 := v_0_0_1.Args[0] 21457 if v_0_0_1_0.Op != OpARM64MOVDconst { 21458 break 21459 } 21460 if v_0_0_1_0.AuxInt != 32 { 21461 break 21462 } 
21463 v_0_0_1_1 := v_0_0_1.Args[1] 21464 if v_0_0_1_1.Op != OpARM64ANDconst { 21465 break 21466 } 21467 if v_0_0_1_1.Type != t { 21468 break 21469 } 21470 if v_0_0_1_1.AuxInt != 31 { 21471 break 21472 } 21473 y := v_0_0_1_1.Args[0] 21474 v_0_1 := v_0.Args[1] 21475 if v_0_1.Op != OpARM64CMPconst { 21476 break 21477 } 21478 if v_0_1.AuxInt != 64 { 21479 break 21480 } 21481 v_0_1_0 := v_0_1.Args[0] 21482 if v_0_1_0.Op != OpARM64SUB { 21483 break 21484 } 21485 if v_0_1_0.Type != t { 21486 break 21487 } 21488 _ = v_0_1_0.Args[1] 21489 v_0_1_0_0 := v_0_1_0.Args[0] 21490 if v_0_1_0_0.Op != OpARM64MOVDconst { 21491 break 21492 } 21493 if v_0_1_0_0.AuxInt != 32 { 21494 break 21495 } 21496 v_0_1_0_1 := v_0_1_0.Args[1] 21497 if v_0_1_0_1.Op != OpARM64ANDconst { 21498 break 21499 } 21500 if v_0_1_0_1.Type != t { 21501 break 21502 } 21503 if v_0_1_0_1.AuxInt != 31 { 21504 break 21505 } 21506 if y != v_0_1_0_1.Args[0] { 21507 break 21508 } 21509 v_1 := v.Args[1] 21510 if v_1.Op != OpARM64SLL { 21511 break 21512 } 21513 _ = v_1.Args[1] 21514 if x != v_1.Args[0] { 21515 break 21516 } 21517 v_1_1 := v_1.Args[1] 21518 if v_1_1.Op != OpARM64ANDconst { 21519 break 21520 } 21521 if v_1_1.Type != t { 21522 break 21523 } 21524 if v_1_1.AuxInt != 31 { 21525 break 21526 } 21527 if y != v_1_1.Args[0] { 21528 break 21529 } 21530 if !(cc.(Op) == OpARM64LessThanU) { 21531 break 21532 } 21533 v.reset(OpARM64RORW) 21534 v.AddArg(x) 21535 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21536 v0.AddArg(y) 21537 v.AddArg(v0) 21538 return true 21539 } 21540 // match: (OR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 21541 // cond: cc.(Op) == OpARM64LessThanU 21542 // result: (RORW x y) 21543 for { 21544 _ = v.Args[1] 21545 v_0 := v.Args[0] 21546 if v_0.Op != OpARM64SRL { 21547 break 21548 } 21549 if v_0.Type != typ.UInt32 { 21550 break 21551 } 21552 _ = 
v_0.Args[1] 21553 v_0_0 := v_0.Args[0] 21554 if v_0_0.Op != OpARM64MOVWUreg { 21555 break 21556 } 21557 x := v_0_0.Args[0] 21558 v_0_1 := v_0.Args[1] 21559 if v_0_1.Op != OpARM64ANDconst { 21560 break 21561 } 21562 t := v_0_1.Type 21563 if v_0_1.AuxInt != 31 { 21564 break 21565 } 21566 y := v_0_1.Args[0] 21567 v_1 := v.Args[1] 21568 if v_1.Op != OpARM64CSEL0 { 21569 break 21570 } 21571 if v_1.Type != typ.UInt32 { 21572 break 21573 } 21574 cc := v_1.Aux 21575 _ = v_1.Args[1] 21576 v_1_0 := v_1.Args[0] 21577 if v_1_0.Op != OpARM64SLL { 21578 break 21579 } 21580 _ = v_1_0.Args[1] 21581 if x != v_1_0.Args[0] { 21582 break 21583 } 21584 v_1_0_1 := v_1_0.Args[1] 21585 if v_1_0_1.Op != OpARM64SUB { 21586 break 21587 } 21588 if v_1_0_1.Type != t { 21589 break 21590 } 21591 _ = v_1_0_1.Args[1] 21592 v_1_0_1_0 := v_1_0_1.Args[0] 21593 if v_1_0_1_0.Op != OpARM64MOVDconst { 21594 break 21595 } 21596 if v_1_0_1_0.AuxInt != 32 { 21597 break 21598 } 21599 v_1_0_1_1 := v_1_0_1.Args[1] 21600 if v_1_0_1_1.Op != OpARM64ANDconst { 21601 break 21602 } 21603 if v_1_0_1_1.Type != t { 21604 break 21605 } 21606 if v_1_0_1_1.AuxInt != 31 { 21607 break 21608 } 21609 if y != v_1_0_1_1.Args[0] { 21610 break 21611 } 21612 v_1_1 := v_1.Args[1] 21613 if v_1_1.Op != OpARM64CMPconst { 21614 break 21615 } 21616 if v_1_1.AuxInt != 64 { 21617 break 21618 } 21619 v_1_1_0 := v_1_1.Args[0] 21620 if v_1_1_0.Op != OpARM64SUB { 21621 break 21622 } 21623 if v_1_1_0.Type != t { 21624 break 21625 } 21626 _ = v_1_1_0.Args[1] 21627 v_1_1_0_0 := v_1_1_0.Args[0] 21628 if v_1_1_0_0.Op != OpARM64MOVDconst { 21629 break 21630 } 21631 if v_1_1_0_0.AuxInt != 32 { 21632 break 21633 } 21634 v_1_1_0_1 := v_1_1_0.Args[1] 21635 if v_1_1_0_1.Op != OpARM64ANDconst { 21636 break 21637 } 21638 if v_1_1_0_1.Type != t { 21639 break 21640 } 21641 if v_1_1_0_1.AuxInt != 31 { 21642 break 21643 } 21644 if y != v_1_1_0_1.Args[0] { 21645 break 21646 } 21647 if !(cc.(Op) == OpARM64LessThanU) { 21648 break 21649 } 21650 
v.reset(OpARM64RORW) 21651 v.AddArg(x) 21652 v.AddArg(y) 21653 return true 21654 } 21655 // match: (OR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 21656 // cond: cc.(Op) == OpARM64LessThanU 21657 // result: (RORW x y) 21658 for { 21659 _ = v.Args[1] 21660 v_0 := v.Args[0] 21661 if v_0.Op != OpARM64CSEL0 { 21662 break 21663 } 21664 if v_0.Type != typ.UInt32 { 21665 break 21666 } 21667 cc := v_0.Aux 21668 _ = v_0.Args[1] 21669 v_0_0 := v_0.Args[0] 21670 if v_0_0.Op != OpARM64SLL { 21671 break 21672 } 21673 _ = v_0_0.Args[1] 21674 x := v_0_0.Args[0] 21675 v_0_0_1 := v_0_0.Args[1] 21676 if v_0_0_1.Op != OpARM64SUB { 21677 break 21678 } 21679 t := v_0_0_1.Type 21680 _ = v_0_0_1.Args[1] 21681 v_0_0_1_0 := v_0_0_1.Args[0] 21682 if v_0_0_1_0.Op != OpARM64MOVDconst { 21683 break 21684 } 21685 if v_0_0_1_0.AuxInt != 32 { 21686 break 21687 } 21688 v_0_0_1_1 := v_0_0_1.Args[1] 21689 if v_0_0_1_1.Op != OpARM64ANDconst { 21690 break 21691 } 21692 if v_0_0_1_1.Type != t { 21693 break 21694 } 21695 if v_0_0_1_1.AuxInt != 31 { 21696 break 21697 } 21698 y := v_0_0_1_1.Args[0] 21699 v_0_1 := v_0.Args[1] 21700 if v_0_1.Op != OpARM64CMPconst { 21701 break 21702 } 21703 if v_0_1.AuxInt != 64 { 21704 break 21705 } 21706 v_0_1_0 := v_0_1.Args[0] 21707 if v_0_1_0.Op != OpARM64SUB { 21708 break 21709 } 21710 if v_0_1_0.Type != t { 21711 break 21712 } 21713 _ = v_0_1_0.Args[1] 21714 v_0_1_0_0 := v_0_1_0.Args[0] 21715 if v_0_1_0_0.Op != OpARM64MOVDconst { 21716 break 21717 } 21718 if v_0_1_0_0.AuxInt != 32 { 21719 break 21720 } 21721 v_0_1_0_1 := v_0_1_0.Args[1] 21722 if v_0_1_0_1.Op != OpARM64ANDconst { 21723 break 21724 } 21725 if v_0_1_0_1.Type != t { 21726 break 21727 } 21728 if v_0_1_0_1.AuxInt != 31 { 21729 break 21730 } 21731 if y != v_0_1_0_1.Args[0] { 21732 break 21733 } 21734 v_1 := v.Args[1] 21735 if v_1.Op != OpARM64SRL { 
21736 break 21737 } 21738 if v_1.Type != typ.UInt32 { 21739 break 21740 } 21741 _ = v_1.Args[1] 21742 v_1_0 := v_1.Args[0] 21743 if v_1_0.Op != OpARM64MOVWUreg { 21744 break 21745 } 21746 if x != v_1_0.Args[0] { 21747 break 21748 } 21749 v_1_1 := v_1.Args[1] 21750 if v_1_1.Op != OpARM64ANDconst { 21751 break 21752 } 21753 if v_1_1.Type != t { 21754 break 21755 } 21756 if v_1_1.AuxInt != 31 { 21757 break 21758 } 21759 if y != v_1_1.Args[0] { 21760 break 21761 } 21762 if !(cc.(Op) == OpARM64LessThanU) { 21763 break 21764 } 21765 v.reset(OpARM64RORW) 21766 v.AddArg(x) 21767 v.AddArg(y) 21768 return true 21769 } 21770 // match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y)) 21771 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 21772 // result: (BFI [bfc] y x) 21773 for { 21774 _ = v.Args[1] 21775 v_0 := v.Args[0] 21776 if v_0.Op != OpARM64UBFIZ { 21777 break 21778 } 21779 bfc := v_0.AuxInt 21780 x := v_0.Args[0] 21781 v_1 := v.Args[1] 21782 if v_1.Op != OpARM64ANDconst { 21783 break 21784 } 21785 ac := v_1.AuxInt 21786 y := v_1.Args[0] 21787 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 21788 break 21789 } 21790 v.reset(OpARM64BFI) 21791 v.AuxInt = bfc 21792 v.AddArg(y) 21793 v.AddArg(x) 21794 return true 21795 } 21796 return false 21797 } 21798 func rewriteValueARM64_OpARM64OR_20(v *Value) bool { 21799 b := v.Block 21800 _ = b 21801 // match: (OR (ANDconst [ac] y) (UBFIZ [bfc] x)) 21802 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 21803 // result: (BFI [bfc] y x) 21804 for { 21805 _ = v.Args[1] 21806 v_0 := v.Args[0] 21807 if v_0.Op != OpARM64ANDconst { 21808 break 21809 } 21810 ac := v_0.AuxInt 21811 y := v_0.Args[0] 21812 v_1 := v.Args[1] 21813 if v_1.Op != OpARM64UBFIZ { 21814 break 21815 } 21816 bfc := v_1.AuxInt 21817 x := v_1.Args[0] 21818 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 21819 break 21820 } 21821 v.reset(OpARM64BFI) 21822 
v.AuxInt = bfc 21823 v.AddArg(y) 21824 v.AddArg(x) 21825 return true 21826 } 21827 // match: (OR (UBFX [bfc] x) (ANDconst [ac] y)) 21828 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 21829 // result: (BFXIL [bfc] y x) 21830 for { 21831 _ = v.Args[1] 21832 v_0 := v.Args[0] 21833 if v_0.Op != OpARM64UBFX { 21834 break 21835 } 21836 bfc := v_0.AuxInt 21837 x := v_0.Args[0] 21838 v_1 := v.Args[1] 21839 if v_1.Op != OpARM64ANDconst { 21840 break 21841 } 21842 ac := v_1.AuxInt 21843 y := v_1.Args[0] 21844 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 21845 break 21846 } 21847 v.reset(OpARM64BFXIL) 21848 v.AuxInt = bfc 21849 v.AddArg(y) 21850 v.AddArg(x) 21851 return true 21852 } 21853 // match: (OR (ANDconst [ac] y) (UBFX [bfc] x)) 21854 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 21855 // result: (BFXIL [bfc] y x) 21856 for { 21857 _ = v.Args[1] 21858 v_0 := v.Args[0] 21859 if v_0.Op != OpARM64ANDconst { 21860 break 21861 } 21862 ac := v_0.AuxInt 21863 y := v_0.Args[0] 21864 v_1 := v.Args[1] 21865 if v_1.Op != OpARM64UBFX { 21866 break 21867 } 21868 bfc := v_1.AuxInt 21869 x := v_1.Args[0] 21870 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 21871 break 21872 } 21873 v.reset(OpARM64BFXIL) 21874 v.AuxInt = bfc 21875 v.AddArg(y) 21876 v.AddArg(x) 21877 return true 21878 } 21879 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 21880 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 21881 // 
result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 21882 for { 21883 t := v.Type 21884 _ = v.Args[1] 21885 o0 := v.Args[0] 21886 if o0.Op != OpARM64ORshiftLL { 21887 break 21888 } 21889 if o0.AuxInt != 8 { 21890 break 21891 } 21892 _ = o0.Args[1] 21893 o1 := o0.Args[0] 21894 if o1.Op != OpARM64ORshiftLL { 21895 break 21896 } 21897 if o1.AuxInt != 16 { 21898 break 21899 } 21900 _ = o1.Args[1] 21901 s0 := o1.Args[0] 21902 if s0.Op != OpARM64SLLconst { 21903 break 21904 } 21905 if s0.AuxInt != 24 { 21906 break 21907 } 21908 y0 := s0.Args[0] 21909 if y0.Op != OpARM64MOVDnop { 21910 break 21911 } 21912 x0 := y0.Args[0] 21913 if x0.Op != OpARM64MOVBUload { 21914 break 21915 } 21916 i3 := x0.AuxInt 21917 s := x0.Aux 21918 _ = x0.Args[1] 21919 p := x0.Args[0] 21920 mem := x0.Args[1] 21921 y1 := o1.Args[1] 21922 if y1.Op != OpARM64MOVDnop { 21923 break 21924 } 21925 x1 := y1.Args[0] 21926 if x1.Op != OpARM64MOVBUload { 21927 break 21928 } 21929 i2 := x1.AuxInt 21930 if x1.Aux != s { 21931 break 21932 } 21933 _ = x1.Args[1] 21934 if p != x1.Args[0] { 21935 break 21936 } 21937 if mem != x1.Args[1] { 21938 break 21939 } 21940 y2 := o0.Args[1] 21941 if y2.Op != OpARM64MOVDnop { 21942 break 21943 } 21944 x2 := y2.Args[0] 21945 if x2.Op != OpARM64MOVBUload { 21946 break 21947 } 21948 i1 := x2.AuxInt 21949 if x2.Aux != s { 21950 break 21951 } 21952 _ = x2.Args[1] 21953 if p != x2.Args[0] { 21954 break 21955 } 21956 if mem != x2.Args[1] { 21957 break 21958 } 21959 y3 := v.Args[1] 21960 if y3.Op != OpARM64MOVDnop { 21961 break 21962 } 21963 x3 := y3.Args[0] 21964 if x3.Op != OpARM64MOVBUload { 21965 break 21966 } 21967 i0 := x3.AuxInt 21968 if x3.Aux != s { 21969 break 21970 } 21971 _ = x3.Args[1] 21972 if p != x3.Args[0] { 21973 break 21974 } 21975 if mem != x3.Args[1] { 21976 break 21977 } 21978 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 
&& y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 21979 break 21980 } 21981 b = mergePoint(b, x0, x1, x2, x3) 21982 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 21983 v.reset(OpCopy) 21984 v.AddArg(v0) 21985 v0.Aux = s 21986 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 21987 v1.AuxInt = i0 21988 v1.AddArg(p) 21989 v0.AddArg(v1) 21990 v0.AddArg(mem) 21991 return true 21992 } 21993 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 21994 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 21995 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 21996 for { 21997 t := v.Type 21998 _ = v.Args[1] 21999 y3 := v.Args[0] 22000 if y3.Op != OpARM64MOVDnop { 22001 break 22002 } 22003 x3 := y3.Args[0] 22004 if x3.Op != OpARM64MOVBUload { 22005 break 22006 } 22007 i0 := x3.AuxInt 22008 s := x3.Aux 22009 _ = x3.Args[1] 22010 p := x3.Args[0] 22011 mem := x3.Args[1] 22012 o0 := v.Args[1] 22013 if o0.Op != OpARM64ORshiftLL { 22014 break 22015 } 22016 if o0.AuxInt != 8 { 22017 break 22018 } 22019 _ = o0.Args[1] 22020 o1 := o0.Args[0] 22021 if o1.Op != OpARM64ORshiftLL { 22022 break 22023 } 22024 if o1.AuxInt != 16 { 22025 break 22026 } 22027 _ = o1.Args[1] 22028 s0 := 
o1.Args[0] 22029 if s0.Op != OpARM64SLLconst { 22030 break 22031 } 22032 if s0.AuxInt != 24 { 22033 break 22034 } 22035 y0 := s0.Args[0] 22036 if y0.Op != OpARM64MOVDnop { 22037 break 22038 } 22039 x0 := y0.Args[0] 22040 if x0.Op != OpARM64MOVBUload { 22041 break 22042 } 22043 i3 := x0.AuxInt 22044 if x0.Aux != s { 22045 break 22046 } 22047 _ = x0.Args[1] 22048 if p != x0.Args[0] { 22049 break 22050 } 22051 if mem != x0.Args[1] { 22052 break 22053 } 22054 y1 := o1.Args[1] 22055 if y1.Op != OpARM64MOVDnop { 22056 break 22057 } 22058 x1 := y1.Args[0] 22059 if x1.Op != OpARM64MOVBUload { 22060 break 22061 } 22062 i2 := x1.AuxInt 22063 if x1.Aux != s { 22064 break 22065 } 22066 _ = x1.Args[1] 22067 if p != x1.Args[0] { 22068 break 22069 } 22070 if mem != x1.Args[1] { 22071 break 22072 } 22073 y2 := o0.Args[1] 22074 if y2.Op != OpARM64MOVDnop { 22075 break 22076 } 22077 x2 := y2.Args[0] 22078 if x2.Op != OpARM64MOVBUload { 22079 break 22080 } 22081 i1 := x2.AuxInt 22082 if x2.Aux != s { 22083 break 22084 } 22085 _ = x2.Args[1] 22086 if p != x2.Args[0] { 22087 break 22088 } 22089 if mem != x2.Args[1] { 22090 break 22091 } 22092 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22093 break 22094 } 22095 b = mergePoint(b, x0, x1, x2, x3) 22096 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 22097 v.reset(OpCopy) 22098 v.AddArg(v0) 22099 v0.Aux = s 22100 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 22101 v1.AuxInt = i0 22102 v1.AddArg(p) 22103 v0.AddArg(v1) 22104 v0.AddArg(mem) 22105 return true 22106 } 22107 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload 
[3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem))) 22108 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22109 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 22110 for { 22111 t := v.Type 22112 _ = v.Args[1] 22113 o0 := v.Args[0] 22114 if o0.Op != OpARM64ORshiftLL { 22115 break 22116 } 22117 if o0.AuxInt != 8 { 22118 break 22119 } 22120 _ = o0.Args[1] 22121 o1 := o0.Args[0] 22122 if o1.Op != OpARM64ORshiftLL { 22123 break 22124 } 22125 if o1.AuxInt != 16 { 22126 break 22127 } 22128 _ = o1.Args[1] 22129 s0 := o1.Args[0] 22130 if s0.Op != OpARM64SLLconst { 22131 break 22132 } 22133 if s0.AuxInt != 24 { 22134 break 22135 } 22136 y0 := s0.Args[0] 22137 if y0.Op != OpARM64MOVDnop { 22138 break 22139 } 22140 x0 := y0.Args[0] 22141 if x0.Op != OpARM64MOVBUload { 22142 break 22143 } 22144 if x0.AuxInt != 3 { 22145 break 22146 } 22147 s := x0.Aux 22148 _ = x0.Args[1] 22149 p := x0.Args[0] 22150 mem := x0.Args[1] 22151 y1 := o1.Args[1] 22152 if y1.Op != OpARM64MOVDnop { 22153 break 22154 } 22155 x1 := y1.Args[0] 22156 if x1.Op != OpARM64MOVBUload { 22157 break 22158 } 22159 if x1.AuxInt != 2 { 22160 break 22161 } 22162 if x1.Aux != s { 22163 break 22164 } 22165 _ = x1.Args[1] 22166 if p != x1.Args[0] { 22167 break 22168 } 22169 if mem != x1.Args[1] { 22170 break 22171 } 22172 y2 := o0.Args[1] 22173 if y2.Op != OpARM64MOVDnop { 22174 break 22175 } 22176 x2 := y2.Args[0] 22177 if 
x2.Op != OpARM64MOVBUload { 22178 break 22179 } 22180 if x2.AuxInt != 1 { 22181 break 22182 } 22183 if x2.Aux != s { 22184 break 22185 } 22186 _ = x2.Args[1] 22187 p1 := x2.Args[0] 22188 if p1.Op != OpARM64ADD { 22189 break 22190 } 22191 _ = p1.Args[1] 22192 ptr1 := p1.Args[0] 22193 idx1 := p1.Args[1] 22194 if mem != x2.Args[1] { 22195 break 22196 } 22197 y3 := v.Args[1] 22198 if y3.Op != OpARM64MOVDnop { 22199 break 22200 } 22201 x3 := y3.Args[0] 22202 if x3.Op != OpARM64MOVBUloadidx { 22203 break 22204 } 22205 _ = x3.Args[2] 22206 ptr0 := x3.Args[0] 22207 idx0 := x3.Args[1] 22208 if mem != x3.Args[2] { 22209 break 22210 } 22211 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22212 break 22213 } 22214 b = mergePoint(b, x0, x1, x2, x3) 22215 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22216 v.reset(OpCopy) 22217 v.AddArg(v0) 22218 v0.AddArg(ptr0) 22219 v0.AddArg(idx0) 22220 v0.AddArg(mem) 22221 return true 22222 } 22223 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 22224 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && 
isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22225 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 22226 for { 22227 t := v.Type 22228 _ = v.Args[1] 22229 y3 := v.Args[0] 22230 if y3.Op != OpARM64MOVDnop { 22231 break 22232 } 22233 x3 := y3.Args[0] 22234 if x3.Op != OpARM64MOVBUloadidx { 22235 break 22236 } 22237 _ = x3.Args[2] 22238 ptr0 := x3.Args[0] 22239 idx0 := x3.Args[1] 22240 mem := x3.Args[2] 22241 o0 := v.Args[1] 22242 if o0.Op != OpARM64ORshiftLL { 22243 break 22244 } 22245 if o0.AuxInt != 8 { 22246 break 22247 } 22248 _ = o0.Args[1] 22249 o1 := o0.Args[0] 22250 if o1.Op != OpARM64ORshiftLL { 22251 break 22252 } 22253 if o1.AuxInt != 16 { 22254 break 22255 } 22256 _ = o1.Args[1] 22257 s0 := o1.Args[0] 22258 if s0.Op != OpARM64SLLconst { 22259 break 22260 } 22261 if s0.AuxInt != 24 { 22262 break 22263 } 22264 y0 := s0.Args[0] 22265 if y0.Op != OpARM64MOVDnop { 22266 break 22267 } 22268 x0 := y0.Args[0] 22269 if x0.Op != OpARM64MOVBUload { 22270 break 22271 } 22272 if x0.AuxInt != 3 { 22273 break 22274 } 22275 s := x0.Aux 22276 _ = x0.Args[1] 22277 p := x0.Args[0] 22278 if mem != x0.Args[1] { 22279 break 22280 } 22281 y1 := o1.Args[1] 22282 if y1.Op != OpARM64MOVDnop { 22283 break 22284 } 22285 x1 := y1.Args[0] 22286 if x1.Op != OpARM64MOVBUload { 22287 break 22288 } 22289 if x1.AuxInt != 2 { 22290 break 22291 } 22292 if x1.Aux != s { 22293 break 22294 } 22295 _ = x1.Args[1] 22296 if p != x1.Args[0] { 22297 break 22298 } 22299 if mem != x1.Args[1] { 22300 break 22301 } 22302 y2 := o0.Args[1] 22303 if y2.Op != OpARM64MOVDnop { 22304 break 22305 } 22306 x2 := y2.Args[0] 22307 if x2.Op != OpARM64MOVBUload { 22308 break 22309 } 22310 if x2.AuxInt != 1 { 22311 break 22312 } 22313 if x2.Aux != s { 22314 break 22315 } 22316 _ = x2.Args[1] 22317 p1 := x2.Args[0] 22318 if p1.Op != 
OpARM64ADD { 22319 break 22320 } 22321 _ = p1.Args[1] 22322 ptr1 := p1.Args[0] 22323 idx1 := p1.Args[1] 22324 if mem != x2.Args[1] { 22325 break 22326 } 22327 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22328 break 22329 } 22330 b = mergePoint(b, x0, x1, x2, x3) 22331 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22332 v.reset(OpCopy) 22333 v.AddArg(v0) 22334 v0.AddArg(ptr0) 22335 v0.AddArg(idx0) 22336 v0.AddArg(mem) 22337 return true 22338 } 22339 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem))) 22340 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22341 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 22342 for { 22343 t := v.Type 22344 _ = v.Args[1] 22345 o0 := v.Args[0] 22346 if o0.Op != OpARM64ORshiftLL { 22347 break 22348 } 22349 if o0.AuxInt != 8 { 22350 break 22351 } 22352 _ = o0.Args[1] 22353 o1 := o0.Args[0] 22354 if o1.Op != OpARM64ORshiftLL { 22355 break 22356 } 22357 if o1.AuxInt != 16 { 22358 
break 22359 } 22360 _ = o1.Args[1] 22361 s0 := o1.Args[0] 22362 if s0.Op != OpARM64SLLconst { 22363 break 22364 } 22365 if s0.AuxInt != 24 { 22366 break 22367 } 22368 y0 := s0.Args[0] 22369 if y0.Op != OpARM64MOVDnop { 22370 break 22371 } 22372 x0 := y0.Args[0] 22373 if x0.Op != OpARM64MOVBUloadidx { 22374 break 22375 } 22376 _ = x0.Args[2] 22377 ptr := x0.Args[0] 22378 x0_1 := x0.Args[1] 22379 if x0_1.Op != OpARM64ADDconst { 22380 break 22381 } 22382 if x0_1.AuxInt != 3 { 22383 break 22384 } 22385 idx := x0_1.Args[0] 22386 mem := x0.Args[2] 22387 y1 := o1.Args[1] 22388 if y1.Op != OpARM64MOVDnop { 22389 break 22390 } 22391 x1 := y1.Args[0] 22392 if x1.Op != OpARM64MOVBUloadidx { 22393 break 22394 } 22395 _ = x1.Args[2] 22396 if ptr != x1.Args[0] { 22397 break 22398 } 22399 x1_1 := x1.Args[1] 22400 if x1_1.Op != OpARM64ADDconst { 22401 break 22402 } 22403 if x1_1.AuxInt != 2 { 22404 break 22405 } 22406 if idx != x1_1.Args[0] { 22407 break 22408 } 22409 if mem != x1.Args[2] { 22410 break 22411 } 22412 y2 := o0.Args[1] 22413 if y2.Op != OpARM64MOVDnop { 22414 break 22415 } 22416 x2 := y2.Args[0] 22417 if x2.Op != OpARM64MOVBUloadidx { 22418 break 22419 } 22420 _ = x2.Args[2] 22421 if ptr != x2.Args[0] { 22422 break 22423 } 22424 x2_1 := x2.Args[1] 22425 if x2_1.Op != OpARM64ADDconst { 22426 break 22427 } 22428 if x2_1.AuxInt != 1 { 22429 break 22430 } 22431 if idx != x2_1.Args[0] { 22432 break 22433 } 22434 if mem != x2.Args[2] { 22435 break 22436 } 22437 y3 := v.Args[1] 22438 if y3.Op != OpARM64MOVDnop { 22439 break 22440 } 22441 x3 := y3.Args[0] 22442 if x3.Op != OpARM64MOVBUloadidx { 22443 break 22444 } 22445 _ = x3.Args[2] 22446 if ptr != x3.Args[0] { 22447 break 22448 } 22449 if idx != x3.Args[1] { 22450 break 22451 } 22452 if mem != x3.Args[2] { 22453 break 22454 } 22455 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && 
mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22456 break 22457 } 22458 b = mergePoint(b, x0, x1, x2, x3) 22459 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22460 v.reset(OpCopy) 22461 v.AddArg(v0) 22462 v0.AddArg(ptr) 22463 v0.AddArg(idx) 22464 v0.AddArg(mem) 22465 return true 22466 } 22467 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 22468 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22469 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 22470 for { 22471 t := v.Type 22472 _ = v.Args[1] 22473 y3 := v.Args[0] 22474 if y3.Op != OpARM64MOVDnop { 22475 break 22476 } 22477 x3 := y3.Args[0] 22478 if x3.Op != OpARM64MOVBUloadidx { 22479 break 22480 } 22481 _ = x3.Args[2] 22482 ptr := x3.Args[0] 22483 idx := x3.Args[1] 22484 mem := x3.Args[2] 22485 o0 := v.Args[1] 22486 if o0.Op != OpARM64ORshiftLL { 22487 break 22488 } 22489 if o0.AuxInt != 8 { 22490 break 22491 } 22492 _ = o0.Args[1] 22493 o1 := o0.Args[0] 22494 if o1.Op != OpARM64ORshiftLL { 22495 break 22496 } 22497 if o1.AuxInt != 16 { 22498 break 22499 } 22500 _ = o1.Args[1] 22501 s0 := o1.Args[0] 22502 if s0.Op != OpARM64SLLconst { 22503 break 22504 } 22505 if s0.AuxInt != 24 { 22506 break 22507 } 22508 y0 := s0.Args[0] 22509 if y0.Op != OpARM64MOVDnop { 
22510 break 22511 } 22512 x0 := y0.Args[0] 22513 if x0.Op != OpARM64MOVBUloadidx { 22514 break 22515 } 22516 _ = x0.Args[2] 22517 if ptr != x0.Args[0] { 22518 break 22519 } 22520 x0_1 := x0.Args[1] 22521 if x0_1.Op != OpARM64ADDconst { 22522 break 22523 } 22524 if x0_1.AuxInt != 3 { 22525 break 22526 } 22527 if idx != x0_1.Args[0] { 22528 break 22529 } 22530 if mem != x0.Args[2] { 22531 break 22532 } 22533 y1 := o1.Args[1] 22534 if y1.Op != OpARM64MOVDnop { 22535 break 22536 } 22537 x1 := y1.Args[0] 22538 if x1.Op != OpARM64MOVBUloadidx { 22539 break 22540 } 22541 _ = x1.Args[2] 22542 if ptr != x1.Args[0] { 22543 break 22544 } 22545 x1_1 := x1.Args[1] 22546 if x1_1.Op != OpARM64ADDconst { 22547 break 22548 } 22549 if x1_1.AuxInt != 2 { 22550 break 22551 } 22552 if idx != x1_1.Args[0] { 22553 break 22554 } 22555 if mem != x1.Args[2] { 22556 break 22557 } 22558 y2 := o0.Args[1] 22559 if y2.Op != OpARM64MOVDnop { 22560 break 22561 } 22562 x2 := y2.Args[0] 22563 if x2.Op != OpARM64MOVBUloadidx { 22564 break 22565 } 22566 _ = x2.Args[2] 22567 if ptr != x2.Args[0] { 22568 break 22569 } 22570 x2_1 := x2.Args[1] 22571 if x2_1.Op != OpARM64ADDconst { 22572 break 22573 } 22574 if x2_1.AuxInt != 1 { 22575 break 22576 } 22577 if idx != x2_1.Args[0] { 22578 break 22579 } 22580 if mem != x2.Args[2] { 22581 break 22582 } 22583 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22584 break 22585 } 22586 b = mergePoint(b, x0, x1, x2, x3) 22587 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22588 v.reset(OpCopy) 22589 v.AddArg(v0) 22590 v0.AddArg(ptr) 22591 v0.AddArg(idx) 22592 v0.AddArg(mem) 22593 return true 22594 } 22595 // match: (OR <t> 
o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 22596 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 22597 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 22598 for { 22599 t := v.Type 22600 _ = v.Args[1] 22601 o0 := v.Args[0] 22602 if o0.Op != OpARM64ORshiftLL { 22603 break 22604 } 22605 if o0.AuxInt != 8 { 22606 break 22607 } 22608 _ = o0.Args[1] 22609 o1 := o0.Args[0] 22610 if o1.Op != OpARM64ORshiftLL { 22611 break 22612 } 22613 if o1.AuxInt != 16 { 22614 break 22615 } 22616 _ = o1.Args[1] 22617 o2 := o1.Args[0] 22618 if o2.Op != OpARM64ORshiftLL { 22619 break 22620 } 22621 if o2.AuxInt != 24 { 22622 break 22623 } 22624 _ = o2.Args[1] 22625 o3 := o2.Args[0] 22626 if o3.Op != 
OpARM64ORshiftLL { 22627 break 22628 } 22629 if o3.AuxInt != 32 { 22630 break 22631 } 22632 _ = o3.Args[1] 22633 o4 := o3.Args[0] 22634 if o4.Op != OpARM64ORshiftLL { 22635 break 22636 } 22637 if o4.AuxInt != 40 { 22638 break 22639 } 22640 _ = o4.Args[1] 22641 o5 := o4.Args[0] 22642 if o5.Op != OpARM64ORshiftLL { 22643 break 22644 } 22645 if o5.AuxInt != 48 { 22646 break 22647 } 22648 _ = o5.Args[1] 22649 s0 := o5.Args[0] 22650 if s0.Op != OpARM64SLLconst { 22651 break 22652 } 22653 if s0.AuxInt != 56 { 22654 break 22655 } 22656 y0 := s0.Args[0] 22657 if y0.Op != OpARM64MOVDnop { 22658 break 22659 } 22660 x0 := y0.Args[0] 22661 if x0.Op != OpARM64MOVBUload { 22662 break 22663 } 22664 i7 := x0.AuxInt 22665 s := x0.Aux 22666 _ = x0.Args[1] 22667 p := x0.Args[0] 22668 mem := x0.Args[1] 22669 y1 := o5.Args[1] 22670 if y1.Op != OpARM64MOVDnop { 22671 break 22672 } 22673 x1 := y1.Args[0] 22674 if x1.Op != OpARM64MOVBUload { 22675 break 22676 } 22677 i6 := x1.AuxInt 22678 if x1.Aux != s { 22679 break 22680 } 22681 _ = x1.Args[1] 22682 if p != x1.Args[0] { 22683 break 22684 } 22685 if mem != x1.Args[1] { 22686 break 22687 } 22688 y2 := o4.Args[1] 22689 if y2.Op != OpARM64MOVDnop { 22690 break 22691 } 22692 x2 := y2.Args[0] 22693 if x2.Op != OpARM64MOVBUload { 22694 break 22695 } 22696 i5 := x2.AuxInt 22697 if x2.Aux != s { 22698 break 22699 } 22700 _ = x2.Args[1] 22701 if p != x2.Args[0] { 22702 break 22703 } 22704 if mem != x2.Args[1] { 22705 break 22706 } 22707 y3 := o3.Args[1] 22708 if y3.Op != OpARM64MOVDnop { 22709 break 22710 } 22711 x3 := y3.Args[0] 22712 if x3.Op != OpARM64MOVBUload { 22713 break 22714 } 22715 i4 := x3.AuxInt 22716 if x3.Aux != s { 22717 break 22718 } 22719 _ = x3.Args[1] 22720 if p != x3.Args[0] { 22721 break 22722 } 22723 if mem != x3.Args[1] { 22724 break 22725 } 22726 y4 := o2.Args[1] 22727 if y4.Op != OpARM64MOVDnop { 22728 break 22729 } 22730 x4 := y4.Args[0] 22731 if x4.Op != OpARM64MOVBUload { 22732 break 22733 } 22734 i3 := x4.AuxInt 22735 
if x4.Aux != s { 22736 break 22737 } 22738 _ = x4.Args[1] 22739 if p != x4.Args[0] { 22740 break 22741 } 22742 if mem != x4.Args[1] { 22743 break 22744 } 22745 y5 := o1.Args[1] 22746 if y5.Op != OpARM64MOVDnop { 22747 break 22748 } 22749 x5 := y5.Args[0] 22750 if x5.Op != OpARM64MOVBUload { 22751 break 22752 } 22753 i2 := x5.AuxInt 22754 if x5.Aux != s { 22755 break 22756 } 22757 _ = x5.Args[1] 22758 if p != x5.Args[0] { 22759 break 22760 } 22761 if mem != x5.Args[1] { 22762 break 22763 } 22764 y6 := o0.Args[1] 22765 if y6.Op != OpARM64MOVDnop { 22766 break 22767 } 22768 x6 := y6.Args[0] 22769 if x6.Op != OpARM64MOVBUload { 22770 break 22771 } 22772 i1 := x6.AuxInt 22773 if x6.Aux != s { 22774 break 22775 } 22776 _ = x6.Args[1] 22777 if p != x6.Args[0] { 22778 break 22779 } 22780 if mem != x6.Args[1] { 22781 break 22782 } 22783 y7 := v.Args[1] 22784 if y7.Op != OpARM64MOVDnop { 22785 break 22786 } 22787 x7 := y7.Args[0] 22788 if x7.Op != OpARM64MOVBUload { 22789 break 22790 } 22791 i0 := x7.AuxInt 22792 if x7.Aux != s { 22793 break 22794 } 22795 _ = x7.Args[1] 22796 if p != x7.Args[0] { 22797 break 22798 } 22799 if mem != x7.Args[1] { 22800 break 22801 } 22802 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && 
clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 22803 break 22804 } 22805 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 22806 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 22807 v.reset(OpCopy) 22808 v.AddArg(v0) 22809 v0.Aux = s 22810 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 22811 v1.AuxInt = i0 22812 v1.AddArg(p) 22813 v0.AddArg(v1) 22814 v0.AddArg(mem) 22815 return true 22816 } 22817 return false 22818 } 22819 func rewriteValueARM64_OpARM64OR_30(v *Value) bool { 22820 b := v.Block 22821 _ = b 22822 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 22823 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 22824 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 
22825 for { 22826 t := v.Type 22827 _ = v.Args[1] 22828 y7 := v.Args[0] 22829 if y7.Op != OpARM64MOVDnop { 22830 break 22831 } 22832 x7 := y7.Args[0] 22833 if x7.Op != OpARM64MOVBUload { 22834 break 22835 } 22836 i0 := x7.AuxInt 22837 s := x7.Aux 22838 _ = x7.Args[1] 22839 p := x7.Args[0] 22840 mem := x7.Args[1] 22841 o0 := v.Args[1] 22842 if o0.Op != OpARM64ORshiftLL { 22843 break 22844 } 22845 if o0.AuxInt != 8 { 22846 break 22847 } 22848 _ = o0.Args[1] 22849 o1 := o0.Args[0] 22850 if o1.Op != OpARM64ORshiftLL { 22851 break 22852 } 22853 if o1.AuxInt != 16 { 22854 break 22855 } 22856 _ = o1.Args[1] 22857 o2 := o1.Args[0] 22858 if o2.Op != OpARM64ORshiftLL { 22859 break 22860 } 22861 if o2.AuxInt != 24 { 22862 break 22863 } 22864 _ = o2.Args[1] 22865 o3 := o2.Args[0] 22866 if o3.Op != OpARM64ORshiftLL { 22867 break 22868 } 22869 if o3.AuxInt != 32 { 22870 break 22871 } 22872 _ = o3.Args[1] 22873 o4 := o3.Args[0] 22874 if o4.Op != OpARM64ORshiftLL { 22875 break 22876 } 22877 if o4.AuxInt != 40 { 22878 break 22879 } 22880 _ = o4.Args[1] 22881 o5 := o4.Args[0] 22882 if o5.Op != OpARM64ORshiftLL { 22883 break 22884 } 22885 if o5.AuxInt != 48 { 22886 break 22887 } 22888 _ = o5.Args[1] 22889 s0 := o5.Args[0] 22890 if s0.Op != OpARM64SLLconst { 22891 break 22892 } 22893 if s0.AuxInt != 56 { 22894 break 22895 } 22896 y0 := s0.Args[0] 22897 if y0.Op != OpARM64MOVDnop { 22898 break 22899 } 22900 x0 := y0.Args[0] 22901 if x0.Op != OpARM64MOVBUload { 22902 break 22903 } 22904 i7 := x0.AuxInt 22905 if x0.Aux != s { 22906 break 22907 } 22908 _ = x0.Args[1] 22909 if p != x0.Args[0] { 22910 break 22911 } 22912 if mem != x0.Args[1] { 22913 break 22914 } 22915 y1 := o5.Args[1] 22916 if y1.Op != OpARM64MOVDnop { 22917 break 22918 } 22919 x1 := y1.Args[0] 22920 if x1.Op != OpARM64MOVBUload { 22921 break 22922 } 22923 i6 := x1.AuxInt 22924 if x1.Aux != s { 22925 break 22926 } 22927 _ = x1.Args[1] 22928 if p != x1.Args[0] { 22929 break 22930 } 22931 if mem != x1.Args[1] { 22932 break 
22933 } 22934 y2 := o4.Args[1] 22935 if y2.Op != OpARM64MOVDnop { 22936 break 22937 } 22938 x2 := y2.Args[0] 22939 if x2.Op != OpARM64MOVBUload { 22940 break 22941 } 22942 i5 := x2.AuxInt 22943 if x2.Aux != s { 22944 break 22945 } 22946 _ = x2.Args[1] 22947 if p != x2.Args[0] { 22948 break 22949 } 22950 if mem != x2.Args[1] { 22951 break 22952 } 22953 y3 := o3.Args[1] 22954 if y3.Op != OpARM64MOVDnop { 22955 break 22956 } 22957 x3 := y3.Args[0] 22958 if x3.Op != OpARM64MOVBUload { 22959 break 22960 } 22961 i4 := x3.AuxInt 22962 if x3.Aux != s { 22963 break 22964 } 22965 _ = x3.Args[1] 22966 if p != x3.Args[0] { 22967 break 22968 } 22969 if mem != x3.Args[1] { 22970 break 22971 } 22972 y4 := o2.Args[1] 22973 if y4.Op != OpARM64MOVDnop { 22974 break 22975 } 22976 x4 := y4.Args[0] 22977 if x4.Op != OpARM64MOVBUload { 22978 break 22979 } 22980 i3 := x4.AuxInt 22981 if x4.Aux != s { 22982 break 22983 } 22984 _ = x4.Args[1] 22985 if p != x4.Args[0] { 22986 break 22987 } 22988 if mem != x4.Args[1] { 22989 break 22990 } 22991 y5 := o1.Args[1] 22992 if y5.Op != OpARM64MOVDnop { 22993 break 22994 } 22995 x5 := y5.Args[0] 22996 if x5.Op != OpARM64MOVBUload { 22997 break 22998 } 22999 i2 := x5.AuxInt 23000 if x5.Aux != s { 23001 break 23002 } 23003 _ = x5.Args[1] 23004 if p != x5.Args[0] { 23005 break 23006 } 23007 if mem != x5.Args[1] { 23008 break 23009 } 23010 y6 := o0.Args[1] 23011 if y6.Op != OpARM64MOVDnop { 23012 break 23013 } 23014 x6 := y6.Args[0] 23015 if x6.Op != OpARM64MOVBUload { 23016 break 23017 } 23018 i1 := x6.AuxInt 23019 if x6.Aux != s { 23020 break 23021 } 23022 _ = x6.Args[1] 23023 if p != x6.Args[0] { 23024 break 23025 } 23026 if mem != x6.Args[1] { 23027 break 23028 } 23029 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && 
y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23030 break 23031 } 23032 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23033 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 23034 v.reset(OpCopy) 23035 v.AddArg(v0) 23036 v0.Aux = s 23037 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 23038 v1.AuxInt = i0 23039 v1.AddArg(p) 23040 v0.AddArg(v1) 23041 v0.AddArg(mem) 23042 return true 23043 } 23044 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem))) 23045 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && 
isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23046 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 23047 for { 23048 t := v.Type 23049 _ = v.Args[1] 23050 o0 := v.Args[0] 23051 if o0.Op != OpARM64ORshiftLL { 23052 break 23053 } 23054 if o0.AuxInt != 8 { 23055 break 23056 } 23057 _ = o0.Args[1] 23058 o1 := o0.Args[0] 23059 if o1.Op != OpARM64ORshiftLL { 23060 break 23061 } 23062 if o1.AuxInt != 16 { 23063 break 23064 } 23065 _ = o1.Args[1] 23066 o2 := o1.Args[0] 23067 if o2.Op != OpARM64ORshiftLL { 23068 break 23069 } 23070 if o2.AuxInt != 24 { 23071 break 23072 } 23073 _ = o2.Args[1] 23074 o3 := o2.Args[0] 23075 if o3.Op != OpARM64ORshiftLL { 23076 break 23077 } 23078 if o3.AuxInt != 32 { 23079 break 23080 } 23081 _ = o3.Args[1] 23082 o4 := o3.Args[0] 23083 if o4.Op != OpARM64ORshiftLL { 23084 break 23085 } 23086 if o4.AuxInt != 40 { 23087 break 23088 } 23089 _ = o4.Args[1] 23090 o5 := o4.Args[0] 23091 if o5.Op != OpARM64ORshiftLL { 23092 break 23093 } 23094 if o5.AuxInt != 48 { 23095 break 23096 } 23097 _ = o5.Args[1] 23098 s0 := o5.Args[0] 23099 if s0.Op != OpARM64SLLconst { 23100 break 23101 } 23102 if s0.AuxInt != 56 { 23103 break 23104 } 23105 y0 := s0.Args[0] 23106 if y0.Op != OpARM64MOVDnop { 23107 break 23108 } 23109 x0 := y0.Args[0] 23110 if x0.Op != OpARM64MOVBUload { 23111 break 23112 } 23113 if x0.AuxInt != 7 { 23114 break 23115 } 23116 s := x0.Aux 23117 _ = x0.Args[1] 23118 p := x0.Args[0] 23119 mem := x0.Args[1] 23120 y1 := o5.Args[1] 23121 if y1.Op != OpARM64MOVDnop { 23122 break 23123 } 23124 x1 := y1.Args[0] 23125 if x1.Op != OpARM64MOVBUload { 23126 break 23127 } 23128 if x1.AuxInt != 6 { 23129 break 
23130 } 23131 if x1.Aux != s { 23132 break 23133 } 23134 _ = x1.Args[1] 23135 if p != x1.Args[0] { 23136 break 23137 } 23138 if mem != x1.Args[1] { 23139 break 23140 } 23141 y2 := o4.Args[1] 23142 if y2.Op != OpARM64MOVDnop { 23143 break 23144 } 23145 x2 := y2.Args[0] 23146 if x2.Op != OpARM64MOVBUload { 23147 break 23148 } 23149 if x2.AuxInt != 5 { 23150 break 23151 } 23152 if x2.Aux != s { 23153 break 23154 } 23155 _ = x2.Args[1] 23156 if p != x2.Args[0] { 23157 break 23158 } 23159 if mem != x2.Args[1] { 23160 break 23161 } 23162 y3 := o3.Args[1] 23163 if y3.Op != OpARM64MOVDnop { 23164 break 23165 } 23166 x3 := y3.Args[0] 23167 if x3.Op != OpARM64MOVBUload { 23168 break 23169 } 23170 if x3.AuxInt != 4 { 23171 break 23172 } 23173 if x3.Aux != s { 23174 break 23175 } 23176 _ = x3.Args[1] 23177 if p != x3.Args[0] { 23178 break 23179 } 23180 if mem != x3.Args[1] { 23181 break 23182 } 23183 y4 := o2.Args[1] 23184 if y4.Op != OpARM64MOVDnop { 23185 break 23186 } 23187 x4 := y4.Args[0] 23188 if x4.Op != OpARM64MOVBUload { 23189 break 23190 } 23191 if x4.AuxInt != 3 { 23192 break 23193 } 23194 if x4.Aux != s { 23195 break 23196 } 23197 _ = x4.Args[1] 23198 if p != x4.Args[0] { 23199 break 23200 } 23201 if mem != x4.Args[1] { 23202 break 23203 } 23204 y5 := o1.Args[1] 23205 if y5.Op != OpARM64MOVDnop { 23206 break 23207 } 23208 x5 := y5.Args[0] 23209 if x5.Op != OpARM64MOVBUload { 23210 break 23211 } 23212 if x5.AuxInt != 2 { 23213 break 23214 } 23215 if x5.Aux != s { 23216 break 23217 } 23218 _ = x5.Args[1] 23219 if p != x5.Args[0] { 23220 break 23221 } 23222 if mem != x5.Args[1] { 23223 break 23224 } 23225 y6 := o0.Args[1] 23226 if y6.Op != OpARM64MOVDnop { 23227 break 23228 } 23229 x6 := y6.Args[0] 23230 if x6.Op != OpARM64MOVBUload { 23231 break 23232 } 23233 if x6.AuxInt != 1 { 23234 break 23235 } 23236 if x6.Aux != s { 23237 break 23238 } 23239 _ = x6.Args[1] 23240 p1 := x6.Args[0] 23241 if p1.Op != OpARM64ADD { 23242 break 23243 } 23244 _ = p1.Args[1] 23245 ptr1 
:= p1.Args[0] 23246 idx1 := p1.Args[1] 23247 if mem != x6.Args[1] { 23248 break 23249 } 23250 y7 := v.Args[1] 23251 if y7.Op != OpARM64MOVDnop { 23252 break 23253 } 23254 x7 := y7.Args[0] 23255 if x7.Op != OpARM64MOVBUloadidx { 23256 break 23257 } 23258 _ = x7.Args[2] 23259 ptr0 := x7.Args[0] 23260 idx0 := x7.Args[1] 23261 if mem != x7.Args[2] { 23262 break 23263 } 23264 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23265 break 23266 } 23267 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23268 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 23269 v.reset(OpCopy) 23270 v.AddArg(v0) 23271 v0.AddArg(ptr0) 23272 v0.AddArg(idx0) 23273 v0.AddArg(mem) 23274 return true 23275 } 23276 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p 
mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 23277 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23278 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 23279 for { 23280 t := v.Type 23281 _ = v.Args[1] 23282 y7 := v.Args[0] 23283 if y7.Op != OpARM64MOVDnop { 23284 break 23285 } 23286 x7 := y7.Args[0] 23287 if x7.Op != OpARM64MOVBUloadidx { 23288 break 23289 } 23290 _ = x7.Args[2] 23291 ptr0 := x7.Args[0] 23292 idx0 := x7.Args[1] 23293 mem := x7.Args[2] 23294 o0 := v.Args[1] 23295 if o0.Op != OpARM64ORshiftLL { 23296 break 23297 } 23298 if o0.AuxInt != 8 { 23299 break 23300 } 23301 _ = o0.Args[1] 23302 o1 := o0.Args[0] 23303 if o1.Op != OpARM64ORshiftLL { 23304 break 23305 } 23306 if o1.AuxInt != 16 { 23307 break 23308 } 23309 _ = o1.Args[1] 23310 o2 := o1.Args[0] 23311 if o2.Op != OpARM64ORshiftLL { 23312 break 23313 } 23314 if o2.AuxInt != 24 { 23315 break 23316 } 23317 _ = o2.Args[1] 23318 o3 := o2.Args[0] 23319 if o3.Op != OpARM64ORshiftLL { 23320 break 23321 } 23322 if o3.AuxInt != 32 { 23323 break 23324 } 23325 _ = o3.Args[1] 23326 o4 := o3.Args[0] 23327 if o4.Op != 
OpARM64ORshiftLL { 23328 break 23329 } 23330 if o4.AuxInt != 40 { 23331 break 23332 } 23333 _ = o4.Args[1] 23334 o5 := o4.Args[0] 23335 if o5.Op != OpARM64ORshiftLL { 23336 break 23337 } 23338 if o5.AuxInt != 48 { 23339 break 23340 } 23341 _ = o5.Args[1] 23342 s0 := o5.Args[0] 23343 if s0.Op != OpARM64SLLconst { 23344 break 23345 } 23346 if s0.AuxInt != 56 { 23347 break 23348 } 23349 y0 := s0.Args[0] 23350 if y0.Op != OpARM64MOVDnop { 23351 break 23352 } 23353 x0 := y0.Args[0] 23354 if x0.Op != OpARM64MOVBUload { 23355 break 23356 } 23357 if x0.AuxInt != 7 { 23358 break 23359 } 23360 s := x0.Aux 23361 _ = x0.Args[1] 23362 p := x0.Args[0] 23363 if mem != x0.Args[1] { 23364 break 23365 } 23366 y1 := o5.Args[1] 23367 if y1.Op != OpARM64MOVDnop { 23368 break 23369 } 23370 x1 := y1.Args[0] 23371 if x1.Op != OpARM64MOVBUload { 23372 break 23373 } 23374 if x1.AuxInt != 6 { 23375 break 23376 } 23377 if x1.Aux != s { 23378 break 23379 } 23380 _ = x1.Args[1] 23381 if p != x1.Args[0] { 23382 break 23383 } 23384 if mem != x1.Args[1] { 23385 break 23386 } 23387 y2 := o4.Args[1] 23388 if y2.Op != OpARM64MOVDnop { 23389 break 23390 } 23391 x2 := y2.Args[0] 23392 if x2.Op != OpARM64MOVBUload { 23393 break 23394 } 23395 if x2.AuxInt != 5 { 23396 break 23397 } 23398 if x2.Aux != s { 23399 break 23400 } 23401 _ = x2.Args[1] 23402 if p != x2.Args[0] { 23403 break 23404 } 23405 if mem != x2.Args[1] { 23406 break 23407 } 23408 y3 := o3.Args[1] 23409 if y3.Op != OpARM64MOVDnop { 23410 break 23411 } 23412 x3 := y3.Args[0] 23413 if x3.Op != OpARM64MOVBUload { 23414 break 23415 } 23416 if x3.AuxInt != 4 { 23417 break 23418 } 23419 if x3.Aux != s { 23420 break 23421 } 23422 _ = x3.Args[1] 23423 if p != x3.Args[0] { 23424 break 23425 } 23426 if mem != x3.Args[1] { 23427 break 23428 } 23429 y4 := o2.Args[1] 23430 if y4.Op != OpARM64MOVDnop { 23431 break 23432 } 23433 x4 := y4.Args[0] 23434 if x4.Op != OpARM64MOVBUload { 23435 break 23436 } 23437 if x4.AuxInt != 3 { 23438 break 23439 } 23440 if 
x4.Aux != s { 23441 break 23442 } 23443 _ = x4.Args[1] 23444 if p != x4.Args[0] { 23445 break 23446 } 23447 if mem != x4.Args[1] { 23448 break 23449 } 23450 y5 := o1.Args[1] 23451 if y5.Op != OpARM64MOVDnop { 23452 break 23453 } 23454 x5 := y5.Args[0] 23455 if x5.Op != OpARM64MOVBUload { 23456 break 23457 } 23458 if x5.AuxInt != 2 { 23459 break 23460 } 23461 if x5.Aux != s { 23462 break 23463 } 23464 _ = x5.Args[1] 23465 if p != x5.Args[0] { 23466 break 23467 } 23468 if mem != x5.Args[1] { 23469 break 23470 } 23471 y6 := o0.Args[1] 23472 if y6.Op != OpARM64MOVDnop { 23473 break 23474 } 23475 x6 := y6.Args[0] 23476 if x6.Op != OpARM64MOVBUload { 23477 break 23478 } 23479 if x6.AuxInt != 1 { 23480 break 23481 } 23482 if x6.Aux != s { 23483 break 23484 } 23485 _ = x6.Args[1] 23486 p1 := x6.Args[0] 23487 if p1.Op != OpARM64ADD { 23488 break 23489 } 23490 _ = p1.Args[1] 23491 ptr1 := p1.Args[0] 23492 idx1 := p1.Args[1] 23493 if mem != x6.Args[1] { 23494 break 23495 } 23496 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23497 break 23498 } 23499 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23500 v0 := b.NewValue0(v.Pos, 
OpARM64MOVDloadidx, t) 23501 v.reset(OpCopy) 23502 v.AddArg(v0) 23503 v0.AddArg(ptr0) 23504 v0.AddArg(idx0) 23505 v0.AddArg(mem) 23506 return true 23507 } 23508 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem))) 23509 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23510 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 23511 for { 23512 t := v.Type 23513 _ = v.Args[1] 23514 o0 := v.Args[0] 23515 if o0.Op != OpARM64ORshiftLL { 23516 break 23517 } 23518 if o0.AuxInt != 8 { 23519 break 23520 } 23521 _ = o0.Args[1] 23522 o1 := o0.Args[0] 23523 if o1.Op != OpARM64ORshiftLL { 23524 break 23525 } 23526 if o1.AuxInt != 16 { 23527 break 23528 } 23529 _ = o1.Args[1] 23530 o2 := o1.Args[0] 23531 
if o2.Op != OpARM64ORshiftLL { 23532 break 23533 } 23534 if o2.AuxInt != 24 { 23535 break 23536 } 23537 _ = o2.Args[1] 23538 o3 := o2.Args[0] 23539 if o3.Op != OpARM64ORshiftLL { 23540 break 23541 } 23542 if o3.AuxInt != 32 { 23543 break 23544 } 23545 _ = o3.Args[1] 23546 o4 := o3.Args[0] 23547 if o4.Op != OpARM64ORshiftLL { 23548 break 23549 } 23550 if o4.AuxInt != 40 { 23551 break 23552 } 23553 _ = o4.Args[1] 23554 o5 := o4.Args[0] 23555 if o5.Op != OpARM64ORshiftLL { 23556 break 23557 } 23558 if o5.AuxInt != 48 { 23559 break 23560 } 23561 _ = o5.Args[1] 23562 s0 := o5.Args[0] 23563 if s0.Op != OpARM64SLLconst { 23564 break 23565 } 23566 if s0.AuxInt != 56 { 23567 break 23568 } 23569 y0 := s0.Args[0] 23570 if y0.Op != OpARM64MOVDnop { 23571 break 23572 } 23573 x0 := y0.Args[0] 23574 if x0.Op != OpARM64MOVBUloadidx { 23575 break 23576 } 23577 _ = x0.Args[2] 23578 ptr := x0.Args[0] 23579 x0_1 := x0.Args[1] 23580 if x0_1.Op != OpARM64ADDconst { 23581 break 23582 } 23583 if x0_1.AuxInt != 7 { 23584 break 23585 } 23586 idx := x0_1.Args[0] 23587 mem := x0.Args[2] 23588 y1 := o5.Args[1] 23589 if y1.Op != OpARM64MOVDnop { 23590 break 23591 } 23592 x1 := y1.Args[0] 23593 if x1.Op != OpARM64MOVBUloadidx { 23594 break 23595 } 23596 _ = x1.Args[2] 23597 if ptr != x1.Args[0] { 23598 break 23599 } 23600 x1_1 := x1.Args[1] 23601 if x1_1.Op != OpARM64ADDconst { 23602 break 23603 } 23604 if x1_1.AuxInt != 6 { 23605 break 23606 } 23607 if idx != x1_1.Args[0] { 23608 break 23609 } 23610 if mem != x1.Args[2] { 23611 break 23612 } 23613 y2 := o4.Args[1] 23614 if y2.Op != OpARM64MOVDnop { 23615 break 23616 } 23617 x2 := y2.Args[0] 23618 if x2.Op != OpARM64MOVBUloadidx { 23619 break 23620 } 23621 _ = x2.Args[2] 23622 if ptr != x2.Args[0] { 23623 break 23624 } 23625 x2_1 := x2.Args[1] 23626 if x2_1.Op != OpARM64ADDconst { 23627 break 23628 } 23629 if x2_1.AuxInt != 5 { 23630 break 23631 } 23632 if idx != x2_1.Args[0] { 23633 break 23634 } 23635 if mem != x2.Args[2] { 23636 break 23637 } 
23638 y3 := o3.Args[1] 23639 if y3.Op != OpARM64MOVDnop { 23640 break 23641 } 23642 x3 := y3.Args[0] 23643 if x3.Op != OpARM64MOVBUloadidx { 23644 break 23645 } 23646 _ = x3.Args[2] 23647 if ptr != x3.Args[0] { 23648 break 23649 } 23650 x3_1 := x3.Args[1] 23651 if x3_1.Op != OpARM64ADDconst { 23652 break 23653 } 23654 if x3_1.AuxInt != 4 { 23655 break 23656 } 23657 if idx != x3_1.Args[0] { 23658 break 23659 } 23660 if mem != x3.Args[2] { 23661 break 23662 } 23663 y4 := o2.Args[1] 23664 if y4.Op != OpARM64MOVDnop { 23665 break 23666 } 23667 x4 := y4.Args[0] 23668 if x4.Op != OpARM64MOVBUloadidx { 23669 break 23670 } 23671 _ = x4.Args[2] 23672 if ptr != x4.Args[0] { 23673 break 23674 } 23675 x4_1 := x4.Args[1] 23676 if x4_1.Op != OpARM64ADDconst { 23677 break 23678 } 23679 if x4_1.AuxInt != 3 { 23680 break 23681 } 23682 if idx != x4_1.Args[0] { 23683 break 23684 } 23685 if mem != x4.Args[2] { 23686 break 23687 } 23688 y5 := o1.Args[1] 23689 if y5.Op != OpARM64MOVDnop { 23690 break 23691 } 23692 x5 := y5.Args[0] 23693 if x5.Op != OpARM64MOVBUloadidx { 23694 break 23695 } 23696 _ = x5.Args[2] 23697 if ptr != x5.Args[0] { 23698 break 23699 } 23700 x5_1 := x5.Args[1] 23701 if x5_1.Op != OpARM64ADDconst { 23702 break 23703 } 23704 if x5_1.AuxInt != 2 { 23705 break 23706 } 23707 if idx != x5_1.Args[0] { 23708 break 23709 } 23710 if mem != x5.Args[2] { 23711 break 23712 } 23713 y6 := o0.Args[1] 23714 if y6.Op != OpARM64MOVDnop { 23715 break 23716 } 23717 x6 := y6.Args[0] 23718 if x6.Op != OpARM64MOVBUloadidx { 23719 break 23720 } 23721 _ = x6.Args[2] 23722 if ptr != x6.Args[0] { 23723 break 23724 } 23725 x6_1 := x6.Args[1] 23726 if x6_1.Op != OpARM64ADDconst { 23727 break 23728 } 23729 if x6_1.AuxInt != 1 { 23730 break 23731 } 23732 if idx != x6_1.Args[0] { 23733 break 23734 } 23735 if mem != x6.Args[2] { 23736 break 23737 } 23738 y7 := v.Args[1] 23739 if y7.Op != OpARM64MOVDnop { 23740 break 23741 } 23742 x7 := y7.Args[0] 23743 if x7.Op != OpARM64MOVBUloadidx { 23744 break 
23745 } 23746 _ = x7.Args[2] 23747 if ptr != x7.Args[0] { 23748 break 23749 } 23750 if idx != x7.Args[1] { 23751 break 23752 } 23753 if mem != x7.Args[2] { 23754 break 23755 } 23756 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23757 break 23758 } 23759 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23760 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 23761 v.reset(OpCopy) 23762 v.AddArg(v0) 23763 v0.AddArg(ptr) 23764 v0.AddArg(idx) 23765 v0.AddArg(mem) 23766 return true 23767 } 23768 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 23769 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 
&& y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23770 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 23771 for { 23772 t := v.Type 23773 _ = v.Args[1] 23774 y7 := v.Args[0] 23775 if y7.Op != OpARM64MOVDnop { 23776 break 23777 } 23778 x7 := y7.Args[0] 23779 if x7.Op != OpARM64MOVBUloadidx { 23780 break 23781 } 23782 _ = x7.Args[2] 23783 ptr := x7.Args[0] 23784 idx := x7.Args[1] 23785 mem := x7.Args[2] 23786 o0 := v.Args[1] 23787 if o0.Op != OpARM64ORshiftLL { 23788 break 23789 } 23790 if o0.AuxInt != 8 { 23791 break 23792 } 23793 _ = o0.Args[1] 23794 o1 := o0.Args[0] 23795 if o1.Op != OpARM64ORshiftLL { 23796 break 23797 } 23798 if o1.AuxInt != 16 { 23799 break 23800 } 23801 _ = o1.Args[1] 23802 o2 := o1.Args[0] 23803 if o2.Op != OpARM64ORshiftLL { 23804 break 23805 } 23806 if o2.AuxInt != 24 { 23807 break 23808 } 23809 _ = o2.Args[1] 23810 o3 := o2.Args[0] 23811 if o3.Op != OpARM64ORshiftLL { 23812 break 23813 } 23814 if o3.AuxInt != 32 { 23815 break 23816 } 23817 _ = o3.Args[1] 23818 o4 := o3.Args[0] 23819 if o4.Op != OpARM64ORshiftLL { 23820 break 23821 } 23822 if o4.AuxInt != 40 { 23823 break 23824 } 23825 _ = o4.Args[1] 23826 o5 := o4.Args[0] 23827 if o5.Op != OpARM64ORshiftLL { 23828 break 23829 } 23830 if o5.AuxInt != 48 { 23831 break 23832 } 23833 _ = o5.Args[1] 23834 s0 := o5.Args[0] 23835 if s0.Op != OpARM64SLLconst { 23836 break 23837 } 23838 if s0.AuxInt != 56 { 23839 break 23840 
} 23841 y0 := s0.Args[0] 23842 if y0.Op != OpARM64MOVDnop { 23843 break 23844 } 23845 x0 := y0.Args[0] 23846 if x0.Op != OpARM64MOVBUloadidx { 23847 break 23848 } 23849 _ = x0.Args[2] 23850 if ptr != x0.Args[0] { 23851 break 23852 } 23853 x0_1 := x0.Args[1] 23854 if x0_1.Op != OpARM64ADDconst { 23855 break 23856 } 23857 if x0_1.AuxInt != 7 { 23858 break 23859 } 23860 if idx != x0_1.Args[0] { 23861 break 23862 } 23863 if mem != x0.Args[2] { 23864 break 23865 } 23866 y1 := o5.Args[1] 23867 if y1.Op != OpARM64MOVDnop { 23868 break 23869 } 23870 x1 := y1.Args[0] 23871 if x1.Op != OpARM64MOVBUloadidx { 23872 break 23873 } 23874 _ = x1.Args[2] 23875 if ptr != x1.Args[0] { 23876 break 23877 } 23878 x1_1 := x1.Args[1] 23879 if x1_1.Op != OpARM64ADDconst { 23880 break 23881 } 23882 if x1_1.AuxInt != 6 { 23883 break 23884 } 23885 if idx != x1_1.Args[0] { 23886 break 23887 } 23888 if mem != x1.Args[2] { 23889 break 23890 } 23891 y2 := o4.Args[1] 23892 if y2.Op != OpARM64MOVDnop { 23893 break 23894 } 23895 x2 := y2.Args[0] 23896 if x2.Op != OpARM64MOVBUloadidx { 23897 break 23898 } 23899 _ = x2.Args[2] 23900 if ptr != x2.Args[0] { 23901 break 23902 } 23903 x2_1 := x2.Args[1] 23904 if x2_1.Op != OpARM64ADDconst { 23905 break 23906 } 23907 if x2_1.AuxInt != 5 { 23908 break 23909 } 23910 if idx != x2_1.Args[0] { 23911 break 23912 } 23913 if mem != x2.Args[2] { 23914 break 23915 } 23916 y3 := o3.Args[1] 23917 if y3.Op != OpARM64MOVDnop { 23918 break 23919 } 23920 x3 := y3.Args[0] 23921 if x3.Op != OpARM64MOVBUloadidx { 23922 break 23923 } 23924 _ = x3.Args[2] 23925 if ptr != x3.Args[0] { 23926 break 23927 } 23928 x3_1 := x3.Args[1] 23929 if x3_1.Op != OpARM64ADDconst { 23930 break 23931 } 23932 if x3_1.AuxInt != 4 { 23933 break 23934 } 23935 if idx != x3_1.Args[0] { 23936 break 23937 } 23938 if mem != x3.Args[2] { 23939 break 23940 } 23941 y4 := o2.Args[1] 23942 if y4.Op != OpARM64MOVDnop { 23943 break 23944 } 23945 x4 := y4.Args[0] 23946 if x4.Op != OpARM64MOVBUloadidx { 23947 
break 23948 } 23949 _ = x4.Args[2] 23950 if ptr != x4.Args[0] { 23951 break 23952 } 23953 x4_1 := x4.Args[1] 23954 if x4_1.Op != OpARM64ADDconst { 23955 break 23956 } 23957 if x4_1.AuxInt != 3 { 23958 break 23959 } 23960 if idx != x4_1.Args[0] { 23961 break 23962 } 23963 if mem != x4.Args[2] { 23964 break 23965 } 23966 y5 := o1.Args[1] 23967 if y5.Op != OpARM64MOVDnop { 23968 break 23969 } 23970 x5 := y5.Args[0] 23971 if x5.Op != OpARM64MOVBUloadidx { 23972 break 23973 } 23974 _ = x5.Args[2] 23975 if ptr != x5.Args[0] { 23976 break 23977 } 23978 x5_1 := x5.Args[1] 23979 if x5_1.Op != OpARM64ADDconst { 23980 break 23981 } 23982 if x5_1.AuxInt != 2 { 23983 break 23984 } 23985 if idx != x5_1.Args[0] { 23986 break 23987 } 23988 if mem != x5.Args[2] { 23989 break 23990 } 23991 y6 := o0.Args[1] 23992 if y6.Op != OpARM64MOVDnop { 23993 break 23994 } 23995 x6 := y6.Args[0] 23996 if x6.Op != OpARM64MOVBUloadidx { 23997 break 23998 } 23999 _ = x6.Args[2] 24000 if ptr != x6.Args[0] { 24001 break 24002 } 24003 x6_1 := x6.Args[1] 24004 if x6_1.Op != OpARM64ADDconst { 24005 break 24006 } 24007 if x6_1.AuxInt != 1 { 24008 break 24009 } 24010 if idx != x6_1.Args[0] { 24011 break 24012 } 24013 if mem != x6.Args[2] { 24014 break 24015 } 24016 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && 
clobber(o5) && clobber(s0)) { 24017 break 24018 } 24019 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 24020 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 24021 v.reset(OpCopy) 24022 v.AddArg(v0) 24023 v0.AddArg(ptr) 24024 v0.AddArg(idx) 24025 v0.AddArg(mem) 24026 return true 24027 } 24028 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 24029 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24030 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24031 for { 24032 t := v.Type 24033 _ = v.Args[1] 24034 o0 := v.Args[0] 24035 if o0.Op != OpARM64ORshiftLL { 24036 break 24037 } 24038 if o0.AuxInt != 8 { 24039 break 24040 } 24041 _ = o0.Args[1] 24042 o1 := o0.Args[0] 24043 if o1.Op != OpARM64ORshiftLL { 24044 break 24045 } 24046 if o1.AuxInt != 16 { 24047 break 24048 } 24049 _ = o1.Args[1] 24050 s0 := o1.Args[0] 24051 if s0.Op != OpARM64SLLconst { 24052 break 24053 } 24054 if s0.AuxInt != 24 { 24055 break 24056 } 24057 y0 := s0.Args[0] 24058 if y0.Op != OpARM64MOVDnop { 24059 break 24060 } 24061 x0 := y0.Args[0] 24062 if x0.Op != OpARM64MOVBUload { 24063 break 24064 } 24065 i0 := x0.AuxInt 24066 s := x0.Aux 24067 _ = x0.Args[1] 24068 p := x0.Args[0] 24069 mem := x0.Args[1] 24070 y1 := o1.Args[1] 24071 if y1.Op != OpARM64MOVDnop { 24072 break 24073 } 24074 x1 := y1.Args[0] 24075 if x1.Op != OpARM64MOVBUload { 24076 break 24077 } 24078 i1 := x1.AuxInt 
24079 if x1.Aux != s { 24080 break 24081 } 24082 _ = x1.Args[1] 24083 if p != x1.Args[0] { 24084 break 24085 } 24086 if mem != x1.Args[1] { 24087 break 24088 } 24089 y2 := o0.Args[1] 24090 if y2.Op != OpARM64MOVDnop { 24091 break 24092 } 24093 x2 := y2.Args[0] 24094 if x2.Op != OpARM64MOVBUload { 24095 break 24096 } 24097 i2 := x2.AuxInt 24098 if x2.Aux != s { 24099 break 24100 } 24101 _ = x2.Args[1] 24102 if p != x2.Args[0] { 24103 break 24104 } 24105 if mem != x2.Args[1] { 24106 break 24107 } 24108 y3 := v.Args[1] 24109 if y3.Op != OpARM64MOVDnop { 24110 break 24111 } 24112 x3 := y3.Args[0] 24113 if x3.Op != OpARM64MOVBUload { 24114 break 24115 } 24116 i3 := x3.AuxInt 24117 if x3.Aux != s { 24118 break 24119 } 24120 _ = x3.Args[1] 24121 if p != x3.Args[0] { 24122 break 24123 } 24124 if mem != x3.Args[1] { 24125 break 24126 } 24127 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24128 break 24129 } 24130 b = mergePoint(b, x0, x1, x2, x3) 24131 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24132 v.reset(OpCopy) 24133 v.AddArg(v0) 24134 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 24135 v1.Aux = s 24136 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 24137 v2.AuxInt = i0 24138 v2.AddArg(p) 24139 v1.AddArg(v2) 24140 v1.AddArg(mem) 24141 v0.AddArg(v1) 24142 return true 24143 } 24144 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 24145 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && 
x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24146 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24147 for { 24148 t := v.Type 24149 _ = v.Args[1] 24150 y3 := v.Args[0] 24151 if y3.Op != OpARM64MOVDnop { 24152 break 24153 } 24154 x3 := y3.Args[0] 24155 if x3.Op != OpARM64MOVBUload { 24156 break 24157 } 24158 i3 := x3.AuxInt 24159 s := x3.Aux 24160 _ = x3.Args[1] 24161 p := x3.Args[0] 24162 mem := x3.Args[1] 24163 o0 := v.Args[1] 24164 if o0.Op != OpARM64ORshiftLL { 24165 break 24166 } 24167 if o0.AuxInt != 8 { 24168 break 24169 } 24170 _ = o0.Args[1] 24171 o1 := o0.Args[0] 24172 if o1.Op != OpARM64ORshiftLL { 24173 break 24174 } 24175 if o1.AuxInt != 16 { 24176 break 24177 } 24178 _ = o1.Args[1] 24179 s0 := o1.Args[0] 24180 if s0.Op != OpARM64SLLconst { 24181 break 24182 } 24183 if s0.AuxInt != 24 { 24184 break 24185 } 24186 y0 := s0.Args[0] 24187 if y0.Op != OpARM64MOVDnop { 24188 break 24189 } 24190 x0 := y0.Args[0] 24191 if x0.Op != OpARM64MOVBUload { 24192 break 24193 } 24194 i0 := x0.AuxInt 24195 if x0.Aux != s { 24196 break 24197 } 24198 _ = x0.Args[1] 24199 if p != x0.Args[0] { 24200 break 24201 } 24202 if mem != x0.Args[1] { 24203 break 24204 } 24205 y1 := o1.Args[1] 24206 if y1.Op != OpARM64MOVDnop { 24207 break 24208 } 24209 x1 := y1.Args[0] 24210 if x1.Op != OpARM64MOVBUload { 24211 break 24212 } 24213 i1 := x1.AuxInt 24214 if x1.Aux != s { 24215 break 24216 } 24217 _ = x1.Args[1] 24218 if p != x1.Args[0] { 24219 break 24220 } 24221 if mem != x1.Args[1] { 24222 break 24223 } 24224 y2 := o0.Args[1] 24225 if y2.Op != OpARM64MOVDnop { 24226 break 24227 } 24228 x2 := y2.Args[0] 24229 if x2.Op != 
OpARM64MOVBUload { 24230 break 24231 } 24232 i2 := x2.AuxInt 24233 if x2.Aux != s { 24234 break 24235 } 24236 _ = x2.Args[1] 24237 if p != x2.Args[0] { 24238 break 24239 } 24240 if mem != x2.Args[1] { 24241 break 24242 } 24243 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24244 break 24245 } 24246 b = mergePoint(b, x0, x1, x2, x3) 24247 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24248 v.reset(OpCopy) 24249 v.AddArg(v0) 24250 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 24251 v1.Aux = s 24252 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 24253 v2.AuxInt = i0 24254 v2.AddArg(p) 24255 v1.AddArg(v2) 24256 v1.AddArg(mem) 24257 v0.AddArg(v1) 24258 return true 24259 } 24260 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) 24261 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24262 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 24263 for { 24264 t := v.Type 24265 _ = v.Args[1] 24266 o0 := 
v.Args[0] 24267 if o0.Op != OpARM64ORshiftLL { 24268 break 24269 } 24270 if o0.AuxInt != 8 { 24271 break 24272 } 24273 _ = o0.Args[1] 24274 o1 := o0.Args[0] 24275 if o1.Op != OpARM64ORshiftLL { 24276 break 24277 } 24278 if o1.AuxInt != 16 { 24279 break 24280 } 24281 _ = o1.Args[1] 24282 s0 := o1.Args[0] 24283 if s0.Op != OpARM64SLLconst { 24284 break 24285 } 24286 if s0.AuxInt != 24 { 24287 break 24288 } 24289 y0 := s0.Args[0] 24290 if y0.Op != OpARM64MOVDnop { 24291 break 24292 } 24293 x0 := y0.Args[0] 24294 if x0.Op != OpARM64MOVBUloadidx { 24295 break 24296 } 24297 _ = x0.Args[2] 24298 ptr0 := x0.Args[0] 24299 idx0 := x0.Args[1] 24300 mem := x0.Args[2] 24301 y1 := o1.Args[1] 24302 if y1.Op != OpARM64MOVDnop { 24303 break 24304 } 24305 x1 := y1.Args[0] 24306 if x1.Op != OpARM64MOVBUload { 24307 break 24308 } 24309 if x1.AuxInt != 1 { 24310 break 24311 } 24312 s := x1.Aux 24313 _ = x1.Args[1] 24314 p1 := x1.Args[0] 24315 if p1.Op != OpARM64ADD { 24316 break 24317 } 24318 _ = p1.Args[1] 24319 ptr1 := p1.Args[0] 24320 idx1 := p1.Args[1] 24321 if mem != x1.Args[1] { 24322 break 24323 } 24324 y2 := o0.Args[1] 24325 if y2.Op != OpARM64MOVDnop { 24326 break 24327 } 24328 x2 := y2.Args[0] 24329 if x2.Op != OpARM64MOVBUload { 24330 break 24331 } 24332 if x2.AuxInt != 2 { 24333 break 24334 } 24335 if x2.Aux != s { 24336 break 24337 } 24338 _ = x2.Args[1] 24339 p := x2.Args[0] 24340 if mem != x2.Args[1] { 24341 break 24342 } 24343 y3 := v.Args[1] 24344 if y3.Op != OpARM64MOVDnop { 24345 break 24346 } 24347 x3 := y3.Args[0] 24348 if x3.Op != OpARM64MOVBUload { 24349 break 24350 } 24351 if x3.AuxInt != 3 { 24352 break 24353 } 24354 if x3.Aux != s { 24355 break 24356 } 24357 _ = x3.Args[1] 24358 if p != x3.Args[0] { 24359 break 24360 } 24361 if mem != x3.Args[1] { 24362 break 24363 } 24364 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 
&& mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24365 break 24366 } 24367 b = mergePoint(b, x0, x1, x2, x3) 24368 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24369 v.reset(OpCopy) 24370 v.AddArg(v0) 24371 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24372 v1.AddArg(ptr0) 24373 v1.AddArg(idx0) 24374 v1.AddArg(mem) 24375 v0.AddArg(v1) 24376 return true 24377 } 24378 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem)))) 24379 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24380 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 24381 for { 24382 t := v.Type 24383 _ = v.Args[1] 24384 y3 := v.Args[0] 24385 if y3.Op != OpARM64MOVDnop { 24386 break 24387 } 24388 x3 := y3.Args[0] 24389 if x3.Op != OpARM64MOVBUload { 24390 break 24391 } 24392 if x3.AuxInt != 3 { 24393 break 24394 } 24395 s := x3.Aux 24396 _ = x3.Args[1] 24397 p := x3.Args[0] 24398 mem := x3.Args[1] 24399 o0 := v.Args[1] 24400 if o0.Op != OpARM64ORshiftLL { 24401 break 24402 } 24403 if o0.AuxInt != 8 { 24404 break 24405 } 24406 _ = 
o0.Args[1] 24407 o1 := o0.Args[0] 24408 if o1.Op != OpARM64ORshiftLL { 24409 break 24410 } 24411 if o1.AuxInt != 16 { 24412 break 24413 } 24414 _ = o1.Args[1] 24415 s0 := o1.Args[0] 24416 if s0.Op != OpARM64SLLconst { 24417 break 24418 } 24419 if s0.AuxInt != 24 { 24420 break 24421 } 24422 y0 := s0.Args[0] 24423 if y0.Op != OpARM64MOVDnop { 24424 break 24425 } 24426 x0 := y0.Args[0] 24427 if x0.Op != OpARM64MOVBUloadidx { 24428 break 24429 } 24430 _ = x0.Args[2] 24431 ptr0 := x0.Args[0] 24432 idx0 := x0.Args[1] 24433 if mem != x0.Args[2] { 24434 break 24435 } 24436 y1 := o1.Args[1] 24437 if y1.Op != OpARM64MOVDnop { 24438 break 24439 } 24440 x1 := y1.Args[0] 24441 if x1.Op != OpARM64MOVBUload { 24442 break 24443 } 24444 if x1.AuxInt != 1 { 24445 break 24446 } 24447 if x1.Aux != s { 24448 break 24449 } 24450 _ = x1.Args[1] 24451 p1 := x1.Args[0] 24452 if p1.Op != OpARM64ADD { 24453 break 24454 } 24455 _ = p1.Args[1] 24456 ptr1 := p1.Args[0] 24457 idx1 := p1.Args[1] 24458 if mem != x1.Args[1] { 24459 break 24460 } 24461 y2 := o0.Args[1] 24462 if y2.Op != OpARM64MOVDnop { 24463 break 24464 } 24465 x2 := y2.Args[0] 24466 if x2.Op != OpARM64MOVBUload { 24467 break 24468 } 24469 if x2.AuxInt != 2 { 24470 break 24471 } 24472 if x2.Aux != s { 24473 break 24474 } 24475 _ = x2.Args[1] 24476 if p != x2.Args[0] { 24477 break 24478 } 24479 if mem != x2.Args[1] { 24480 break 24481 } 24482 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24483 break 24484 } 24485 b = mergePoint(b, x0, x1, x2, x3) 24486 v0 := 
b.NewValue0(v.Pos, OpARM64REVW, t) 24487 v.reset(OpCopy) 24488 v.AddArg(v0) 24489 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24490 v1.AddArg(ptr0) 24491 v1.AddArg(idx0) 24492 v1.AddArg(mem) 24493 v0.AddArg(v1) 24494 return true 24495 } 24496 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 24497 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24498 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 24499 for { 24500 t := v.Type 24501 _ = v.Args[1] 24502 o0 := v.Args[0] 24503 if o0.Op != OpARM64ORshiftLL { 24504 break 24505 } 24506 if o0.AuxInt != 8 { 24507 break 24508 } 24509 _ = o0.Args[1] 24510 o1 := o0.Args[0] 24511 if o1.Op != OpARM64ORshiftLL { 24512 break 24513 } 24514 if o1.AuxInt != 16 { 24515 break 24516 } 24517 _ = o1.Args[1] 24518 s0 := o1.Args[0] 24519 if s0.Op != OpARM64SLLconst { 24520 break 24521 } 24522 if s0.AuxInt != 24 { 24523 break 24524 } 24525 y0 := s0.Args[0] 24526 if y0.Op != OpARM64MOVDnop { 24527 break 24528 } 24529 x0 := y0.Args[0] 24530 if x0.Op != OpARM64MOVBUloadidx { 24531 break 24532 } 24533 _ = x0.Args[2] 24534 ptr := x0.Args[0] 24535 idx := x0.Args[1] 24536 mem := x0.Args[2] 24537 y1 := o1.Args[1] 24538 if y1.Op != OpARM64MOVDnop { 24539 break 24540 } 24541 x1 := y1.Args[0] 24542 if x1.Op != OpARM64MOVBUloadidx { 24543 break 24544 } 24545 _ = x1.Args[2] 24546 if ptr != x1.Args[0] { 24547 break 24548 } 24549 x1_1 := x1.Args[1] 
24550 if x1_1.Op != OpARM64ADDconst { 24551 break 24552 } 24553 if x1_1.AuxInt != 1 { 24554 break 24555 } 24556 if idx != x1_1.Args[0] { 24557 break 24558 } 24559 if mem != x1.Args[2] { 24560 break 24561 } 24562 y2 := o0.Args[1] 24563 if y2.Op != OpARM64MOVDnop { 24564 break 24565 } 24566 x2 := y2.Args[0] 24567 if x2.Op != OpARM64MOVBUloadidx { 24568 break 24569 } 24570 _ = x2.Args[2] 24571 if ptr != x2.Args[0] { 24572 break 24573 } 24574 x2_1 := x2.Args[1] 24575 if x2_1.Op != OpARM64ADDconst { 24576 break 24577 } 24578 if x2_1.AuxInt != 2 { 24579 break 24580 } 24581 if idx != x2_1.Args[0] { 24582 break 24583 } 24584 if mem != x2.Args[2] { 24585 break 24586 } 24587 y3 := v.Args[1] 24588 if y3.Op != OpARM64MOVDnop { 24589 break 24590 } 24591 x3 := y3.Args[0] 24592 if x3.Op != OpARM64MOVBUloadidx { 24593 break 24594 } 24595 _ = x3.Args[2] 24596 if ptr != x3.Args[0] { 24597 break 24598 } 24599 x3_1 := x3.Args[1] 24600 if x3_1.Op != OpARM64ADDconst { 24601 break 24602 } 24603 if x3_1.AuxInt != 3 { 24604 break 24605 } 24606 if idx != x3_1.Args[0] { 24607 break 24608 } 24609 if mem != x3.Args[2] { 24610 break 24611 } 24612 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24613 break 24614 } 24615 b = mergePoint(b, x0, x1, x2, x3) 24616 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24617 v.reset(OpCopy) 24618 v.AddArg(v0) 24619 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24620 v1.AddArg(ptr) 24621 v1.AddArg(idx) 24622 v1.AddArg(mem) 24623 v0.AddArg(v1) 24624 return true 24625 } 24626 return false 24627 } 24628 func rewriteValueARM64_OpARM64OR_40(v *Value) bool { 24629 b := v.Block 24630 _ = b 24631 // match: (OR <t> y3:(MOVDnop 
x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem)))) 24632 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24633 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 24634 for { 24635 t := v.Type 24636 _ = v.Args[1] 24637 y3 := v.Args[0] 24638 if y3.Op != OpARM64MOVDnop { 24639 break 24640 } 24641 x3 := y3.Args[0] 24642 if x3.Op != OpARM64MOVBUloadidx { 24643 break 24644 } 24645 _ = x3.Args[2] 24646 ptr := x3.Args[0] 24647 x3_1 := x3.Args[1] 24648 if x3_1.Op != OpARM64ADDconst { 24649 break 24650 } 24651 if x3_1.AuxInt != 3 { 24652 break 24653 } 24654 idx := x3_1.Args[0] 24655 mem := x3.Args[2] 24656 o0 := v.Args[1] 24657 if o0.Op != OpARM64ORshiftLL { 24658 break 24659 } 24660 if o0.AuxInt != 8 { 24661 break 24662 } 24663 _ = o0.Args[1] 24664 o1 := o0.Args[0] 24665 if o1.Op != OpARM64ORshiftLL { 24666 break 24667 } 24668 if o1.AuxInt != 16 { 24669 break 24670 } 24671 _ = o1.Args[1] 24672 s0 := o1.Args[0] 24673 if s0.Op != OpARM64SLLconst { 24674 break 24675 } 24676 if s0.AuxInt != 24 { 24677 break 24678 } 24679 y0 := s0.Args[0] 24680 if y0.Op != OpARM64MOVDnop { 24681 break 24682 } 24683 x0 := y0.Args[0] 24684 if x0.Op != OpARM64MOVBUloadidx { 24685 break 24686 } 24687 _ = x0.Args[2] 24688 if ptr != x0.Args[0] { 24689 break 24690 } 24691 if idx != x0.Args[1] { 24692 break 24693 } 24694 if mem != x0.Args[2] { 24695 break 24696 } 24697 y1 := o1.Args[1] 24698 if y1.Op != OpARM64MOVDnop { 24699 break 24700 
} 24701 x1 := y1.Args[0] 24702 if x1.Op != OpARM64MOVBUloadidx { 24703 break 24704 } 24705 _ = x1.Args[2] 24706 if ptr != x1.Args[0] { 24707 break 24708 } 24709 x1_1 := x1.Args[1] 24710 if x1_1.Op != OpARM64ADDconst { 24711 break 24712 } 24713 if x1_1.AuxInt != 1 { 24714 break 24715 } 24716 if idx != x1_1.Args[0] { 24717 break 24718 } 24719 if mem != x1.Args[2] { 24720 break 24721 } 24722 y2 := o0.Args[1] 24723 if y2.Op != OpARM64MOVDnop { 24724 break 24725 } 24726 x2 := y2.Args[0] 24727 if x2.Op != OpARM64MOVBUloadidx { 24728 break 24729 } 24730 _ = x2.Args[2] 24731 if ptr != x2.Args[0] { 24732 break 24733 } 24734 x2_1 := x2.Args[1] 24735 if x2_1.Op != OpARM64ADDconst { 24736 break 24737 } 24738 if x2_1.AuxInt != 2 { 24739 break 24740 } 24741 if idx != x2_1.Args[0] { 24742 break 24743 } 24744 if mem != x2.Args[2] { 24745 break 24746 } 24747 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24748 break 24749 } 24750 b = mergePoint(b, x0, x1, x2, x3) 24751 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24752 v.reset(OpCopy) 24753 v.AddArg(v0) 24754 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24755 v1.AddArg(ptr) 24756 v1.AddArg(idx) 24757 v1.AddArg(mem) 24758 v0.AddArg(v1) 24759 return true 24760 } 24761 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload 
[i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 24762 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 24763 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24764 for { 24765 t := v.Type 24766 _ = v.Args[1] 24767 o0 := v.Args[0] 24768 if o0.Op != OpARM64ORshiftLL { 24769 break 24770 } 24771 if o0.AuxInt != 8 { 24772 break 24773 } 24774 _ = o0.Args[1] 24775 o1 := o0.Args[0] 24776 if o1.Op != OpARM64ORshiftLL { 24777 break 24778 } 24779 if o1.AuxInt != 16 { 24780 break 24781 } 24782 _ = o1.Args[1] 24783 o2 := o1.Args[0] 24784 if o2.Op != OpARM64ORshiftLL { 24785 break 24786 } 24787 if o2.AuxInt != 24 { 24788 break 24789 } 24790 _ = o2.Args[1] 24791 o3 := o2.Args[0] 24792 if o3.Op != OpARM64ORshiftLL { 24793 break 24794 } 24795 if o3.AuxInt != 32 { 24796 break 24797 } 24798 _ = o3.Args[1] 24799 o4 := o3.Args[0] 24800 if o4.Op != OpARM64ORshiftLL { 24801 break 24802 } 24803 if o4.AuxInt != 40 { 24804 break 24805 } 24806 _ = o4.Args[1] 24807 o5 := o4.Args[0] 24808 if o5.Op != OpARM64ORshiftLL { 24809 break 24810 } 24811 if o5.AuxInt != 48 { 24812 break 24813 } 24814 _ = o5.Args[1] 24815 s0 := 
o5.Args[0] 24816 if s0.Op != OpARM64SLLconst { 24817 break 24818 } 24819 if s0.AuxInt != 56 { 24820 break 24821 } 24822 y0 := s0.Args[0] 24823 if y0.Op != OpARM64MOVDnop { 24824 break 24825 } 24826 x0 := y0.Args[0] 24827 if x0.Op != OpARM64MOVBUload { 24828 break 24829 } 24830 i0 := x0.AuxInt 24831 s := x0.Aux 24832 _ = x0.Args[1] 24833 p := x0.Args[0] 24834 mem := x0.Args[1] 24835 y1 := o5.Args[1] 24836 if y1.Op != OpARM64MOVDnop { 24837 break 24838 } 24839 x1 := y1.Args[0] 24840 if x1.Op != OpARM64MOVBUload { 24841 break 24842 } 24843 i1 := x1.AuxInt 24844 if x1.Aux != s { 24845 break 24846 } 24847 _ = x1.Args[1] 24848 if p != x1.Args[0] { 24849 break 24850 } 24851 if mem != x1.Args[1] { 24852 break 24853 } 24854 y2 := o4.Args[1] 24855 if y2.Op != OpARM64MOVDnop { 24856 break 24857 } 24858 x2 := y2.Args[0] 24859 if x2.Op != OpARM64MOVBUload { 24860 break 24861 } 24862 i2 := x2.AuxInt 24863 if x2.Aux != s { 24864 break 24865 } 24866 _ = x2.Args[1] 24867 if p != x2.Args[0] { 24868 break 24869 } 24870 if mem != x2.Args[1] { 24871 break 24872 } 24873 y3 := o3.Args[1] 24874 if y3.Op != OpARM64MOVDnop { 24875 break 24876 } 24877 x3 := y3.Args[0] 24878 if x3.Op != OpARM64MOVBUload { 24879 break 24880 } 24881 i3 := x3.AuxInt 24882 if x3.Aux != s { 24883 break 24884 } 24885 _ = x3.Args[1] 24886 if p != x3.Args[0] { 24887 break 24888 } 24889 if mem != x3.Args[1] { 24890 break 24891 } 24892 y4 := o2.Args[1] 24893 if y4.Op != OpARM64MOVDnop { 24894 break 24895 } 24896 x4 := y4.Args[0] 24897 if x4.Op != OpARM64MOVBUload { 24898 break 24899 } 24900 i4 := x4.AuxInt 24901 if x4.Aux != s { 24902 break 24903 } 24904 _ = x4.Args[1] 24905 if p != x4.Args[0] { 24906 break 24907 } 24908 if mem != x4.Args[1] { 24909 break 24910 } 24911 y5 := o1.Args[1] 24912 if y5.Op != OpARM64MOVDnop { 24913 break 24914 } 24915 x5 := y5.Args[0] 24916 if x5.Op != OpARM64MOVBUload { 24917 break 24918 } 24919 i5 := x5.AuxInt 24920 if x5.Aux != s { 24921 break 24922 } 24923 _ = x5.Args[1] 24924 if p != 
x5.Args[0] { 24925 break 24926 } 24927 if mem != x5.Args[1] { 24928 break 24929 } 24930 y6 := o0.Args[1] 24931 if y6.Op != OpARM64MOVDnop { 24932 break 24933 } 24934 x6 := y6.Args[0] 24935 if x6.Op != OpARM64MOVBUload { 24936 break 24937 } 24938 i6 := x6.AuxInt 24939 if x6.Aux != s { 24940 break 24941 } 24942 _ = x6.Args[1] 24943 if p != x6.Args[0] { 24944 break 24945 } 24946 if mem != x6.Args[1] { 24947 break 24948 } 24949 y7 := v.Args[1] 24950 if y7.Op != OpARM64MOVDnop { 24951 break 24952 } 24953 x7 := y7.Args[0] 24954 if x7.Op != OpARM64MOVBUload { 24955 break 24956 } 24957 i7 := x7.AuxInt 24958 if x7.Aux != s { 24959 break 24960 } 24961 _ = x7.Args[1] 24962 if p != x7.Args[0] { 24963 break 24964 } 24965 if mem != x7.Args[1] { 24966 break 24967 } 24968 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 24969 break 24970 } 24971 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 24972 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 24973 v.reset(OpCopy) 24974 v.AddArg(v0) 24975 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 24976 v1.Aux = s 24977 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 24978 v2.AuxInt = i0 24979 v2.AddArg(p) 24980 v1.AddArg(v2) 24981 v1.AddArg(mem) 
24982 v0.AddArg(v1) 24983 return true 24984 } 24985 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 24986 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 24987 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24988 for { 24989 t := v.Type 24990 _ = v.Args[1] 24991 y7 := v.Args[0] 24992 if y7.Op != OpARM64MOVDnop { 24993 break 24994 } 24995 x7 := y7.Args[0] 24996 if x7.Op != OpARM64MOVBUload { 24997 break 24998 } 24999 i7 := x7.AuxInt 25000 s := x7.Aux 25001 _ = x7.Args[1] 25002 p := x7.Args[0] 25003 mem := x7.Args[1] 25004 o0 := v.Args[1] 25005 if o0.Op != OpARM64ORshiftLL { 25006 break 25007 } 25008 if o0.AuxInt != 8 { 25009 break 25010 } 25011 _ = o0.Args[1] 
25012 o1 := o0.Args[0] 25013 if o1.Op != OpARM64ORshiftLL { 25014 break 25015 } 25016 if o1.AuxInt != 16 { 25017 break 25018 } 25019 _ = o1.Args[1] 25020 o2 := o1.Args[0] 25021 if o2.Op != OpARM64ORshiftLL { 25022 break 25023 } 25024 if o2.AuxInt != 24 { 25025 break 25026 } 25027 _ = o2.Args[1] 25028 o3 := o2.Args[0] 25029 if o3.Op != OpARM64ORshiftLL { 25030 break 25031 } 25032 if o3.AuxInt != 32 { 25033 break 25034 } 25035 _ = o3.Args[1] 25036 o4 := o3.Args[0] 25037 if o4.Op != OpARM64ORshiftLL { 25038 break 25039 } 25040 if o4.AuxInt != 40 { 25041 break 25042 } 25043 _ = o4.Args[1] 25044 o5 := o4.Args[0] 25045 if o5.Op != OpARM64ORshiftLL { 25046 break 25047 } 25048 if o5.AuxInt != 48 { 25049 break 25050 } 25051 _ = o5.Args[1] 25052 s0 := o5.Args[0] 25053 if s0.Op != OpARM64SLLconst { 25054 break 25055 } 25056 if s0.AuxInt != 56 { 25057 break 25058 } 25059 y0 := s0.Args[0] 25060 if y0.Op != OpARM64MOVDnop { 25061 break 25062 } 25063 x0 := y0.Args[0] 25064 if x0.Op != OpARM64MOVBUload { 25065 break 25066 } 25067 i0 := x0.AuxInt 25068 if x0.Aux != s { 25069 break 25070 } 25071 _ = x0.Args[1] 25072 if p != x0.Args[0] { 25073 break 25074 } 25075 if mem != x0.Args[1] { 25076 break 25077 } 25078 y1 := o5.Args[1] 25079 if y1.Op != OpARM64MOVDnop { 25080 break 25081 } 25082 x1 := y1.Args[0] 25083 if x1.Op != OpARM64MOVBUload { 25084 break 25085 } 25086 i1 := x1.AuxInt 25087 if x1.Aux != s { 25088 break 25089 } 25090 _ = x1.Args[1] 25091 if p != x1.Args[0] { 25092 break 25093 } 25094 if mem != x1.Args[1] { 25095 break 25096 } 25097 y2 := o4.Args[1] 25098 if y2.Op != OpARM64MOVDnop { 25099 break 25100 } 25101 x2 := y2.Args[0] 25102 if x2.Op != OpARM64MOVBUload { 25103 break 25104 } 25105 i2 := x2.AuxInt 25106 if x2.Aux != s { 25107 break 25108 } 25109 _ = x2.Args[1] 25110 if p != x2.Args[0] { 25111 break 25112 } 25113 if mem != x2.Args[1] { 25114 break 25115 } 25116 y3 := o3.Args[1] 25117 if y3.Op != OpARM64MOVDnop { 25118 break 25119 } 25120 x3 := y3.Args[0] 25121 if 
x3.Op != OpARM64MOVBUload { 25122 break 25123 } 25124 i3 := x3.AuxInt 25125 if x3.Aux != s { 25126 break 25127 } 25128 _ = x3.Args[1] 25129 if p != x3.Args[0] { 25130 break 25131 } 25132 if mem != x3.Args[1] { 25133 break 25134 } 25135 y4 := o2.Args[1] 25136 if y4.Op != OpARM64MOVDnop { 25137 break 25138 } 25139 x4 := y4.Args[0] 25140 if x4.Op != OpARM64MOVBUload { 25141 break 25142 } 25143 i4 := x4.AuxInt 25144 if x4.Aux != s { 25145 break 25146 } 25147 _ = x4.Args[1] 25148 if p != x4.Args[0] { 25149 break 25150 } 25151 if mem != x4.Args[1] { 25152 break 25153 } 25154 y5 := o1.Args[1] 25155 if y5.Op != OpARM64MOVDnop { 25156 break 25157 } 25158 x5 := y5.Args[0] 25159 if x5.Op != OpARM64MOVBUload { 25160 break 25161 } 25162 i5 := x5.AuxInt 25163 if x5.Aux != s { 25164 break 25165 } 25166 _ = x5.Args[1] 25167 if p != x5.Args[0] { 25168 break 25169 } 25170 if mem != x5.Args[1] { 25171 break 25172 } 25173 y6 := o0.Args[1] 25174 if y6.Op != OpARM64MOVDnop { 25175 break 25176 } 25177 x6 := y6.Args[0] 25178 if x6.Op != OpARM64MOVBUload { 25179 break 25180 } 25181 i6 := x6.AuxInt 25182 if x6.Aux != s { 25183 break 25184 } 25185 _ = x6.Args[1] 25186 if p != x6.Args[0] { 25187 break 25188 } 25189 if mem != x6.Args[1] { 25190 break 25191 } 25192 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && 
clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25193 break 25194 } 25195 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25196 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 25197 v.reset(OpCopy) 25198 v.AddArg(v0) 25199 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 25200 v1.Aux = s 25201 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 25202 v2.AuxInt = i0 25203 v2.AddArg(p) 25204 v1.AddArg(v2) 25205 v1.AddArg(mem) 25206 v0.AddArg(v1) 25207 return true 25208 } 25209 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [7] {s} p mem))) 25210 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25211 // result: 
@mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 25212 for { 25213 t := v.Type 25214 _ = v.Args[1] 25215 o0 := v.Args[0] 25216 if o0.Op != OpARM64ORshiftLL { 25217 break 25218 } 25219 if o0.AuxInt != 8 { 25220 break 25221 } 25222 _ = o0.Args[1] 25223 o1 := o0.Args[0] 25224 if o1.Op != OpARM64ORshiftLL { 25225 break 25226 } 25227 if o1.AuxInt != 16 { 25228 break 25229 } 25230 _ = o1.Args[1] 25231 o2 := o1.Args[0] 25232 if o2.Op != OpARM64ORshiftLL { 25233 break 25234 } 25235 if o2.AuxInt != 24 { 25236 break 25237 } 25238 _ = o2.Args[1] 25239 o3 := o2.Args[0] 25240 if o3.Op != OpARM64ORshiftLL { 25241 break 25242 } 25243 if o3.AuxInt != 32 { 25244 break 25245 } 25246 _ = o3.Args[1] 25247 o4 := o3.Args[0] 25248 if o4.Op != OpARM64ORshiftLL { 25249 break 25250 } 25251 if o4.AuxInt != 40 { 25252 break 25253 } 25254 _ = o4.Args[1] 25255 o5 := o4.Args[0] 25256 if o5.Op != OpARM64ORshiftLL { 25257 break 25258 } 25259 if o5.AuxInt != 48 { 25260 break 25261 } 25262 _ = o5.Args[1] 25263 s0 := o5.Args[0] 25264 if s0.Op != OpARM64SLLconst { 25265 break 25266 } 25267 if s0.AuxInt != 56 { 25268 break 25269 } 25270 y0 := s0.Args[0] 25271 if y0.Op != OpARM64MOVDnop { 25272 break 25273 } 25274 x0 := y0.Args[0] 25275 if x0.Op != OpARM64MOVBUloadidx { 25276 break 25277 } 25278 _ = x0.Args[2] 25279 ptr0 := x0.Args[0] 25280 idx0 := x0.Args[1] 25281 mem := x0.Args[2] 25282 y1 := o5.Args[1] 25283 if y1.Op != OpARM64MOVDnop { 25284 break 25285 } 25286 x1 := y1.Args[0] 25287 if x1.Op != OpARM64MOVBUload { 25288 break 25289 } 25290 if x1.AuxInt != 1 { 25291 break 25292 } 25293 s := x1.Aux 25294 _ = x1.Args[1] 25295 p1 := x1.Args[0] 25296 if p1.Op != OpARM64ADD { 25297 break 25298 } 25299 _ = p1.Args[1] 25300 ptr1 := p1.Args[0] 25301 idx1 := p1.Args[1] 25302 if mem != x1.Args[1] { 25303 break 25304 } 25305 y2 := o4.Args[1] 25306 if y2.Op != OpARM64MOVDnop { 25307 break 25308 } 25309 x2 := y2.Args[0] 25310 if x2.Op != OpARM64MOVBUload { 25311 break 25312 } 25313 
if x2.AuxInt != 2 { 25314 break 25315 } 25316 if x2.Aux != s { 25317 break 25318 } 25319 _ = x2.Args[1] 25320 p := x2.Args[0] 25321 if mem != x2.Args[1] { 25322 break 25323 } 25324 y3 := o3.Args[1] 25325 if y3.Op != OpARM64MOVDnop { 25326 break 25327 } 25328 x3 := y3.Args[0] 25329 if x3.Op != OpARM64MOVBUload { 25330 break 25331 } 25332 if x3.AuxInt != 3 { 25333 break 25334 } 25335 if x3.Aux != s { 25336 break 25337 } 25338 _ = x3.Args[1] 25339 if p != x3.Args[0] { 25340 break 25341 } 25342 if mem != x3.Args[1] { 25343 break 25344 } 25345 y4 := o2.Args[1] 25346 if y4.Op != OpARM64MOVDnop { 25347 break 25348 } 25349 x4 := y4.Args[0] 25350 if x4.Op != OpARM64MOVBUload { 25351 break 25352 } 25353 if x4.AuxInt != 4 { 25354 break 25355 } 25356 if x4.Aux != s { 25357 break 25358 } 25359 _ = x4.Args[1] 25360 if p != x4.Args[0] { 25361 break 25362 } 25363 if mem != x4.Args[1] { 25364 break 25365 } 25366 y5 := o1.Args[1] 25367 if y5.Op != OpARM64MOVDnop { 25368 break 25369 } 25370 x5 := y5.Args[0] 25371 if x5.Op != OpARM64MOVBUload { 25372 break 25373 } 25374 if x5.AuxInt != 5 { 25375 break 25376 } 25377 if x5.Aux != s { 25378 break 25379 } 25380 _ = x5.Args[1] 25381 if p != x5.Args[0] { 25382 break 25383 } 25384 if mem != x5.Args[1] { 25385 break 25386 } 25387 y6 := o0.Args[1] 25388 if y6.Op != OpARM64MOVDnop { 25389 break 25390 } 25391 x6 := y6.Args[0] 25392 if x6.Op != OpARM64MOVBUload { 25393 break 25394 } 25395 if x6.AuxInt != 6 { 25396 break 25397 } 25398 if x6.Aux != s { 25399 break 25400 } 25401 _ = x6.Args[1] 25402 if p != x6.Args[0] { 25403 break 25404 } 25405 if mem != x6.Args[1] { 25406 break 25407 } 25408 y7 := v.Args[1] 25409 if y7.Op != OpARM64MOVDnop { 25410 break 25411 } 25412 x7 := y7.Args[0] 25413 if x7.Op != OpARM64MOVBUload { 25414 break 25415 } 25416 if x7.AuxInt != 7 { 25417 break 25418 } 25419 if x7.Aux != s { 25420 break 25421 } 25422 _ = x7.Args[1] 25423 if p != x7.Args[0] { 25424 break 25425 } 25426 if mem != x7.Args[1] { 25427 break 25428 } 25429 
if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25430 break 25431 } 25432 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25433 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 25434 v.reset(OpCopy) 25435 v.AddArg(v0) 25436 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 25437 v1.AddArg(ptr0) 25438 v1.AddArg(idx0) 25439 v1.AddArg(mem) 25440 v0.AddArg(v1) 25441 return true 25442 } 25443 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem)))) 25444 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses 
== 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25445 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 25446 for { 25447 t := v.Type 25448 _ = v.Args[1] 25449 y7 := v.Args[0] 25450 if y7.Op != OpARM64MOVDnop { 25451 break 25452 } 25453 x7 := y7.Args[0] 25454 if x7.Op != OpARM64MOVBUload { 25455 break 25456 } 25457 if x7.AuxInt != 7 { 25458 break 25459 } 25460 s := x7.Aux 25461 _ = x7.Args[1] 25462 p := x7.Args[0] 25463 mem := x7.Args[1] 25464 o0 := v.Args[1] 25465 if o0.Op != OpARM64ORshiftLL { 25466 break 25467 } 25468 if o0.AuxInt != 8 { 25469 break 25470 } 25471 _ = o0.Args[1] 25472 o1 := o0.Args[0] 25473 if o1.Op != OpARM64ORshiftLL { 25474 break 25475 } 25476 if o1.AuxInt != 16 { 25477 break 25478 } 25479 _ = o1.Args[1] 25480 o2 := o1.Args[0] 25481 if o2.Op != OpARM64ORshiftLL { 25482 break 25483 } 25484 if o2.AuxInt != 24 { 25485 break 25486 } 25487 _ = o2.Args[1] 25488 o3 := o2.Args[0] 25489 if o3.Op != OpARM64ORshiftLL { 25490 break 25491 } 25492 if o3.AuxInt != 32 { 25493 break 25494 } 25495 _ = o3.Args[1] 25496 o4 := o3.Args[0] 25497 if o4.Op != OpARM64ORshiftLL { 25498 break 25499 } 25500 if o4.AuxInt != 40 { 25501 break 25502 } 25503 _ = o4.Args[1] 25504 o5 := o4.Args[0] 25505 if o5.Op != OpARM64ORshiftLL { 25506 break 25507 } 25508 if o5.AuxInt != 48 { 25509 break 25510 } 25511 _ = o5.Args[1] 25512 s0 := 
o5.Args[0] 25513 if s0.Op != OpARM64SLLconst { 25514 break 25515 } 25516 if s0.AuxInt != 56 { 25517 break 25518 } 25519 y0 := s0.Args[0] 25520 if y0.Op != OpARM64MOVDnop { 25521 break 25522 } 25523 x0 := y0.Args[0] 25524 if x0.Op != OpARM64MOVBUloadidx { 25525 break 25526 } 25527 _ = x0.Args[2] 25528 ptr0 := x0.Args[0] 25529 idx0 := x0.Args[1] 25530 if mem != x0.Args[2] { 25531 break 25532 } 25533 y1 := o5.Args[1] 25534 if y1.Op != OpARM64MOVDnop { 25535 break 25536 } 25537 x1 := y1.Args[0] 25538 if x1.Op != OpARM64MOVBUload { 25539 break 25540 } 25541 if x1.AuxInt != 1 { 25542 break 25543 } 25544 if x1.Aux != s { 25545 break 25546 } 25547 _ = x1.Args[1] 25548 p1 := x1.Args[0] 25549 if p1.Op != OpARM64ADD { 25550 break 25551 } 25552 _ = p1.Args[1] 25553 ptr1 := p1.Args[0] 25554 idx1 := p1.Args[1] 25555 if mem != x1.Args[1] { 25556 break 25557 } 25558 y2 := o4.Args[1] 25559 if y2.Op != OpARM64MOVDnop { 25560 break 25561 } 25562 x2 := y2.Args[0] 25563 if x2.Op != OpARM64MOVBUload { 25564 break 25565 } 25566 if x2.AuxInt != 2 { 25567 break 25568 } 25569 if x2.Aux != s { 25570 break 25571 } 25572 _ = x2.Args[1] 25573 if p != x2.Args[0] { 25574 break 25575 } 25576 if mem != x2.Args[1] { 25577 break 25578 } 25579 y3 := o3.Args[1] 25580 if y3.Op != OpARM64MOVDnop { 25581 break 25582 } 25583 x3 := y3.Args[0] 25584 if x3.Op != OpARM64MOVBUload { 25585 break 25586 } 25587 if x3.AuxInt != 3 { 25588 break 25589 } 25590 if x3.Aux != s { 25591 break 25592 } 25593 _ = x3.Args[1] 25594 if p != x3.Args[0] { 25595 break 25596 } 25597 if mem != x3.Args[1] { 25598 break 25599 } 25600 y4 := o2.Args[1] 25601 if y4.Op != OpARM64MOVDnop { 25602 break 25603 } 25604 x4 := y4.Args[0] 25605 if x4.Op != OpARM64MOVBUload { 25606 break 25607 } 25608 if x4.AuxInt != 4 { 25609 break 25610 } 25611 if x4.Aux != s { 25612 break 25613 } 25614 _ = x4.Args[1] 25615 if p != x4.Args[0] { 25616 break 25617 } 25618 if mem != x4.Args[1] { 25619 break 25620 } 25621 y5 := o1.Args[1] 25622 if y5.Op != 
OpARM64MOVDnop { 25623 break 25624 } 25625 x5 := y5.Args[0] 25626 if x5.Op != OpARM64MOVBUload { 25627 break 25628 } 25629 if x5.AuxInt != 5 { 25630 break 25631 } 25632 if x5.Aux != s { 25633 break 25634 } 25635 _ = x5.Args[1] 25636 if p != x5.Args[0] { 25637 break 25638 } 25639 if mem != x5.Args[1] { 25640 break 25641 } 25642 y6 := o0.Args[1] 25643 if y6.Op != OpARM64MOVDnop { 25644 break 25645 } 25646 x6 := y6.Args[0] 25647 if x6.Op != OpARM64MOVBUload { 25648 break 25649 } 25650 if x6.AuxInt != 6 { 25651 break 25652 } 25653 if x6.Aux != s { 25654 break 25655 } 25656 _ = x6.Args[1] 25657 if p != x6.Args[0] { 25658 break 25659 } 25660 if mem != x6.Args[1] { 25661 break 25662 } 25663 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25664 break 25665 } 25666 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25667 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 25668 v.reset(OpCopy) 25669 v.AddArg(v0) 25670 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 25671 v1.AddArg(ptr0) 25672 v1.AddArg(idx0) 25673 v1.AddArg(mem) 25674 v0.AddArg(v1) 25675 return true 25676 } 25677 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] 
o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 25678 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25679 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 25680 for { 25681 t := v.Type 25682 _ = v.Args[1] 25683 o0 := v.Args[0] 25684 if o0.Op != OpARM64ORshiftLL { 25685 break 25686 } 25687 if o0.AuxInt != 8 { 25688 break 25689 } 25690 _ = o0.Args[1] 25691 o1 := o0.Args[0] 25692 if o1.Op != OpARM64ORshiftLL { 25693 break 25694 } 25695 if o1.AuxInt != 16 { 25696 break 25697 } 25698 _ = o1.Args[1] 25699 o2 := o1.Args[0] 25700 if o2.Op != OpARM64ORshiftLL { 25701 break 25702 } 25703 if o2.AuxInt != 24 { 25704 break 25705 } 25706 _ = o2.Args[1] 25707 o3 := o2.Args[0] 25708 if o3.Op != OpARM64ORshiftLL { 25709 break 25710 } 25711 
if o3.AuxInt != 32 { 25712 break 25713 } 25714 _ = o3.Args[1] 25715 o4 := o3.Args[0] 25716 if o4.Op != OpARM64ORshiftLL { 25717 break 25718 } 25719 if o4.AuxInt != 40 { 25720 break 25721 } 25722 _ = o4.Args[1] 25723 o5 := o4.Args[0] 25724 if o5.Op != OpARM64ORshiftLL { 25725 break 25726 } 25727 if o5.AuxInt != 48 { 25728 break 25729 } 25730 _ = o5.Args[1] 25731 s0 := o5.Args[0] 25732 if s0.Op != OpARM64SLLconst { 25733 break 25734 } 25735 if s0.AuxInt != 56 { 25736 break 25737 } 25738 y0 := s0.Args[0] 25739 if y0.Op != OpARM64MOVDnop { 25740 break 25741 } 25742 x0 := y0.Args[0] 25743 if x0.Op != OpARM64MOVBUloadidx { 25744 break 25745 } 25746 _ = x0.Args[2] 25747 ptr := x0.Args[0] 25748 idx := x0.Args[1] 25749 mem := x0.Args[2] 25750 y1 := o5.Args[1] 25751 if y1.Op != OpARM64MOVDnop { 25752 break 25753 } 25754 x1 := y1.Args[0] 25755 if x1.Op != OpARM64MOVBUloadidx { 25756 break 25757 } 25758 _ = x1.Args[2] 25759 if ptr != x1.Args[0] { 25760 break 25761 } 25762 x1_1 := x1.Args[1] 25763 if x1_1.Op != OpARM64ADDconst { 25764 break 25765 } 25766 if x1_1.AuxInt != 1 { 25767 break 25768 } 25769 if idx != x1_1.Args[0] { 25770 break 25771 } 25772 if mem != x1.Args[2] { 25773 break 25774 } 25775 y2 := o4.Args[1] 25776 if y2.Op != OpARM64MOVDnop { 25777 break 25778 } 25779 x2 := y2.Args[0] 25780 if x2.Op != OpARM64MOVBUloadidx { 25781 break 25782 } 25783 _ = x2.Args[2] 25784 if ptr != x2.Args[0] { 25785 break 25786 } 25787 x2_1 := x2.Args[1] 25788 if x2_1.Op != OpARM64ADDconst { 25789 break 25790 } 25791 if x2_1.AuxInt != 2 { 25792 break 25793 } 25794 if idx != x2_1.Args[0] { 25795 break 25796 } 25797 if mem != x2.Args[2] { 25798 break 25799 } 25800 y3 := o3.Args[1] 25801 if y3.Op != OpARM64MOVDnop { 25802 break 25803 } 25804 x3 := y3.Args[0] 25805 if x3.Op != OpARM64MOVBUloadidx { 25806 break 25807 } 25808 _ = x3.Args[2] 25809 if ptr != x3.Args[0] { 25810 break 25811 } 25812 x3_1 := x3.Args[1] 25813 if x3_1.Op != OpARM64ADDconst { 25814 break 25815 } 25816 if x3_1.AuxInt != 
3 { 25817 break 25818 } 25819 if idx != x3_1.Args[0] { 25820 break 25821 } 25822 if mem != x3.Args[2] { 25823 break 25824 } 25825 y4 := o2.Args[1] 25826 if y4.Op != OpARM64MOVDnop { 25827 break 25828 } 25829 x4 := y4.Args[0] 25830 if x4.Op != OpARM64MOVBUloadidx { 25831 break 25832 } 25833 _ = x4.Args[2] 25834 if ptr != x4.Args[0] { 25835 break 25836 } 25837 x4_1 := x4.Args[1] 25838 if x4_1.Op != OpARM64ADDconst { 25839 break 25840 } 25841 if x4_1.AuxInt != 4 { 25842 break 25843 } 25844 if idx != x4_1.Args[0] { 25845 break 25846 } 25847 if mem != x4.Args[2] { 25848 break 25849 } 25850 y5 := o1.Args[1] 25851 if y5.Op != OpARM64MOVDnop { 25852 break 25853 } 25854 x5 := y5.Args[0] 25855 if x5.Op != OpARM64MOVBUloadidx { 25856 break 25857 } 25858 _ = x5.Args[2] 25859 if ptr != x5.Args[0] { 25860 break 25861 } 25862 x5_1 := x5.Args[1] 25863 if x5_1.Op != OpARM64ADDconst { 25864 break 25865 } 25866 if x5_1.AuxInt != 5 { 25867 break 25868 } 25869 if idx != x5_1.Args[0] { 25870 break 25871 } 25872 if mem != x5.Args[2] { 25873 break 25874 } 25875 y6 := o0.Args[1] 25876 if y6.Op != OpARM64MOVDnop { 25877 break 25878 } 25879 x6 := y6.Args[0] 25880 if x6.Op != OpARM64MOVBUloadidx { 25881 break 25882 } 25883 _ = x6.Args[2] 25884 if ptr != x6.Args[0] { 25885 break 25886 } 25887 x6_1 := x6.Args[1] 25888 if x6_1.Op != OpARM64ADDconst { 25889 break 25890 } 25891 if x6_1.AuxInt != 6 { 25892 break 25893 } 25894 if idx != x6_1.Args[0] { 25895 break 25896 } 25897 if mem != x6.Args[2] { 25898 break 25899 } 25900 y7 := v.Args[1] 25901 if y7.Op != OpARM64MOVDnop { 25902 break 25903 } 25904 x7 := y7.Args[0] 25905 if x7.Op != OpARM64MOVBUloadidx { 25906 break 25907 } 25908 _ = x7.Args[2] 25909 if ptr != x7.Args[0] { 25910 break 25911 } 25912 x7_1 := x7.Args[1] 25913 if x7_1.Op != OpARM64ADDconst { 25914 break 25915 } 25916 if x7_1.AuxInt != 7 { 25917 break 25918 } 25919 if idx != x7_1.Args[0] { 25920 break 25921 } 25922 if mem != x7.Args[2] { 25923 break 25924 } 25925 if !(x0.Uses == 1 && 
x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25926 break 25927 } 25928 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25929 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 25930 v.reset(OpCopy) 25931 v.AddArg(v0) 25932 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 25933 v1.AddArg(ptr) 25934 v1.AddArg(idx) 25935 v1.AddArg(mem) 25936 v0.AddArg(v1) 25937 return true 25938 } 25939 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem)))) 25940 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && 
o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25941 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 25942 for { 25943 t := v.Type 25944 _ = v.Args[1] 25945 y7 := v.Args[0] 25946 if y7.Op != OpARM64MOVDnop { 25947 break 25948 } 25949 x7 := y7.Args[0] 25950 if x7.Op != OpARM64MOVBUloadidx { 25951 break 25952 } 25953 _ = x7.Args[2] 25954 ptr := x7.Args[0] 25955 x7_1 := x7.Args[1] 25956 if x7_1.Op != OpARM64ADDconst { 25957 break 25958 } 25959 if x7_1.AuxInt != 7 { 25960 break 25961 } 25962 idx := x7_1.Args[0] 25963 mem := x7.Args[2] 25964 o0 := v.Args[1] 25965 if o0.Op != OpARM64ORshiftLL { 25966 break 25967 } 25968 if o0.AuxInt != 8 { 25969 break 25970 } 25971 _ = o0.Args[1] 25972 o1 := o0.Args[0] 25973 if o1.Op != OpARM64ORshiftLL { 25974 break 25975 } 25976 if o1.AuxInt != 16 { 25977 break 25978 } 25979 _ = o1.Args[1] 25980 o2 := o1.Args[0] 25981 if o2.Op != OpARM64ORshiftLL { 25982 break 25983 } 25984 if o2.AuxInt != 24 { 25985 break 25986 } 25987 _ = o2.Args[1] 25988 o3 := o2.Args[0] 25989 if o3.Op != OpARM64ORshiftLL { 25990 break 25991 } 25992 if o3.AuxInt != 32 { 25993 break 25994 } 25995 _ = o3.Args[1] 25996 o4 := o3.Args[0] 25997 if o4.Op != OpARM64ORshiftLL { 25998 break 25999 } 26000 if o4.AuxInt != 40 { 26001 break 26002 } 26003 _ = o4.Args[1] 26004 o5 := o4.Args[0] 26005 if o5.Op != OpARM64ORshiftLL { 26006 break 26007 } 26008 if o5.AuxInt != 48 { 26009 break 26010 } 26011 _ = o5.Args[1] 26012 s0 := o5.Args[0] 26013 if s0.Op != OpARM64SLLconst { 26014 break 26015 } 26016 if s0.AuxInt != 56 { 26017 
break 26018 } 26019 y0 := s0.Args[0] 26020 if y0.Op != OpARM64MOVDnop { 26021 break 26022 } 26023 x0 := y0.Args[0] 26024 if x0.Op != OpARM64MOVBUloadidx { 26025 break 26026 } 26027 _ = x0.Args[2] 26028 if ptr != x0.Args[0] { 26029 break 26030 } 26031 if idx != x0.Args[1] { 26032 break 26033 } 26034 if mem != x0.Args[2] { 26035 break 26036 } 26037 y1 := o5.Args[1] 26038 if y1.Op != OpARM64MOVDnop { 26039 break 26040 } 26041 x1 := y1.Args[0] 26042 if x1.Op != OpARM64MOVBUloadidx { 26043 break 26044 } 26045 _ = x1.Args[2] 26046 if ptr != x1.Args[0] { 26047 break 26048 } 26049 x1_1 := x1.Args[1] 26050 if x1_1.Op != OpARM64ADDconst { 26051 break 26052 } 26053 if x1_1.AuxInt != 1 { 26054 break 26055 } 26056 if idx != x1_1.Args[0] { 26057 break 26058 } 26059 if mem != x1.Args[2] { 26060 break 26061 } 26062 y2 := o4.Args[1] 26063 if y2.Op != OpARM64MOVDnop { 26064 break 26065 } 26066 x2 := y2.Args[0] 26067 if x2.Op != OpARM64MOVBUloadidx { 26068 break 26069 } 26070 _ = x2.Args[2] 26071 if ptr != x2.Args[0] { 26072 break 26073 } 26074 x2_1 := x2.Args[1] 26075 if x2_1.Op != OpARM64ADDconst { 26076 break 26077 } 26078 if x2_1.AuxInt != 2 { 26079 break 26080 } 26081 if idx != x2_1.Args[0] { 26082 break 26083 } 26084 if mem != x2.Args[2] { 26085 break 26086 } 26087 y3 := o3.Args[1] 26088 if y3.Op != OpARM64MOVDnop { 26089 break 26090 } 26091 x3 := y3.Args[0] 26092 if x3.Op != OpARM64MOVBUloadidx { 26093 break 26094 } 26095 _ = x3.Args[2] 26096 if ptr != x3.Args[0] { 26097 break 26098 } 26099 x3_1 := x3.Args[1] 26100 if x3_1.Op != OpARM64ADDconst { 26101 break 26102 } 26103 if x3_1.AuxInt != 3 { 26104 break 26105 } 26106 if idx != x3_1.Args[0] { 26107 break 26108 } 26109 if mem != x3.Args[2] { 26110 break 26111 } 26112 y4 := o2.Args[1] 26113 if y4.Op != OpARM64MOVDnop { 26114 break 26115 } 26116 x4 := y4.Args[0] 26117 if x4.Op != OpARM64MOVBUloadidx { 26118 break 26119 } 26120 _ = x4.Args[2] 26121 if ptr != x4.Args[0] { 26122 break 26123 } 26124 x4_1 := x4.Args[1] 26125 if 
x4_1.Op != OpARM64ADDconst { 26126 break 26127 } 26128 if x4_1.AuxInt != 4 { 26129 break 26130 } 26131 if idx != x4_1.Args[0] { 26132 break 26133 } 26134 if mem != x4.Args[2] { 26135 break 26136 } 26137 y5 := o1.Args[1] 26138 if y5.Op != OpARM64MOVDnop { 26139 break 26140 } 26141 x5 := y5.Args[0] 26142 if x5.Op != OpARM64MOVBUloadidx { 26143 break 26144 } 26145 _ = x5.Args[2] 26146 if ptr != x5.Args[0] { 26147 break 26148 } 26149 x5_1 := x5.Args[1] 26150 if x5_1.Op != OpARM64ADDconst { 26151 break 26152 } 26153 if x5_1.AuxInt != 5 { 26154 break 26155 } 26156 if idx != x5_1.Args[0] { 26157 break 26158 } 26159 if mem != x5.Args[2] { 26160 break 26161 } 26162 y6 := o0.Args[1] 26163 if y6.Op != OpARM64MOVDnop { 26164 break 26165 } 26166 x6 := y6.Args[0] 26167 if x6.Op != OpARM64MOVBUloadidx { 26168 break 26169 } 26170 _ = x6.Args[2] 26171 if ptr != x6.Args[0] { 26172 break 26173 } 26174 x6_1 := x6.Args[1] 26175 if x6_1.Op != OpARM64ADDconst { 26176 break 26177 } 26178 if x6_1.AuxInt != 6 { 26179 break 26180 } 26181 if idx != x6_1.Args[0] { 26182 break 26183 } 26184 if mem != x6.Args[2] { 26185 break 26186 } 26187 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 26188 break 26189 } 26190 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 26191 v0 := 
b.NewValue0(v.Pos, OpARM64REV, t) 26192 v.reset(OpCopy) 26193 v.AddArg(v0) 26194 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 26195 v1.AddArg(ptr) 26196 v1.AddArg(idx) 26197 v1.AddArg(mem) 26198 v0.AddArg(v1) 26199 return true 26200 } 26201 return false 26202 } 26203 func rewriteValueARM64_OpARM64ORN_0(v *Value) bool { 26204 // match: (ORN x (MOVDconst [c])) 26205 // cond: 26206 // result: (ORconst [^c] x) 26207 for { 26208 _ = v.Args[1] 26209 x := v.Args[0] 26210 v_1 := v.Args[1] 26211 if v_1.Op != OpARM64MOVDconst { 26212 break 26213 } 26214 c := v_1.AuxInt 26215 v.reset(OpARM64ORconst) 26216 v.AuxInt = ^c 26217 v.AddArg(x) 26218 return true 26219 } 26220 // match: (ORN x x) 26221 // cond: 26222 // result: (MOVDconst [-1]) 26223 for { 26224 _ = v.Args[1] 26225 x := v.Args[0] 26226 if x != v.Args[1] { 26227 break 26228 } 26229 v.reset(OpARM64MOVDconst) 26230 v.AuxInt = -1 26231 return true 26232 } 26233 // match: (ORN x0 x1:(SLLconst [c] y)) 26234 // cond: clobberIfDead(x1) 26235 // result: (ORNshiftLL x0 y [c]) 26236 for { 26237 _ = v.Args[1] 26238 x0 := v.Args[0] 26239 x1 := v.Args[1] 26240 if x1.Op != OpARM64SLLconst { 26241 break 26242 } 26243 c := x1.AuxInt 26244 y := x1.Args[0] 26245 if !(clobberIfDead(x1)) { 26246 break 26247 } 26248 v.reset(OpARM64ORNshiftLL) 26249 v.AuxInt = c 26250 v.AddArg(x0) 26251 v.AddArg(y) 26252 return true 26253 } 26254 // match: (ORN x0 x1:(SRLconst [c] y)) 26255 // cond: clobberIfDead(x1) 26256 // result: (ORNshiftRL x0 y [c]) 26257 for { 26258 _ = v.Args[1] 26259 x0 := v.Args[0] 26260 x1 := v.Args[1] 26261 if x1.Op != OpARM64SRLconst { 26262 break 26263 } 26264 c := x1.AuxInt 26265 y := x1.Args[0] 26266 if !(clobberIfDead(x1)) { 26267 break 26268 } 26269 v.reset(OpARM64ORNshiftRL) 26270 v.AuxInt = c 26271 v.AddArg(x0) 26272 v.AddArg(y) 26273 return true 26274 } 26275 // match: (ORN x0 x1:(SRAconst [c] y)) 26276 // cond: clobberIfDead(x1) 26277 // result: (ORNshiftRA x0 y [c]) 26278 for { 26279 _ = v.Args[1] 26280 x0 := 
v.Args[0] 26281 x1 := v.Args[1] 26282 if x1.Op != OpARM64SRAconst { 26283 break 26284 } 26285 c := x1.AuxInt 26286 y := x1.Args[0] 26287 if !(clobberIfDead(x1)) { 26288 break 26289 } 26290 v.reset(OpARM64ORNshiftRA) 26291 v.AuxInt = c 26292 v.AddArg(x0) 26293 v.AddArg(y) 26294 return true 26295 } 26296 return false 26297 } 26298 func rewriteValueARM64_OpARM64ORNshiftLL_0(v *Value) bool { 26299 // match: (ORNshiftLL x (MOVDconst [c]) [d]) 26300 // cond: 26301 // result: (ORconst x [^int64(uint64(c)<<uint64(d))]) 26302 for { 26303 d := v.AuxInt 26304 _ = v.Args[1] 26305 x := v.Args[0] 26306 v_1 := v.Args[1] 26307 if v_1.Op != OpARM64MOVDconst { 26308 break 26309 } 26310 c := v_1.AuxInt 26311 v.reset(OpARM64ORconst) 26312 v.AuxInt = ^int64(uint64(c) << uint64(d)) 26313 v.AddArg(x) 26314 return true 26315 } 26316 // match: (ORNshiftLL x (SLLconst x [c]) [d]) 26317 // cond: c==d 26318 // result: (MOVDconst [-1]) 26319 for { 26320 d := v.AuxInt 26321 _ = v.Args[1] 26322 x := v.Args[0] 26323 v_1 := v.Args[1] 26324 if v_1.Op != OpARM64SLLconst { 26325 break 26326 } 26327 c := v_1.AuxInt 26328 if x != v_1.Args[0] { 26329 break 26330 } 26331 if !(c == d) { 26332 break 26333 } 26334 v.reset(OpARM64MOVDconst) 26335 v.AuxInt = -1 26336 return true 26337 } 26338 return false 26339 } 26340 func rewriteValueARM64_OpARM64ORNshiftRA_0(v *Value) bool { 26341 // match: (ORNshiftRA x (MOVDconst [c]) [d]) 26342 // cond: 26343 // result: (ORconst x [^(c>>uint64(d))]) 26344 for { 26345 d := v.AuxInt 26346 _ = v.Args[1] 26347 x := v.Args[0] 26348 v_1 := v.Args[1] 26349 if v_1.Op != OpARM64MOVDconst { 26350 break 26351 } 26352 c := v_1.AuxInt 26353 v.reset(OpARM64ORconst) 26354 v.AuxInt = ^(c >> uint64(d)) 26355 v.AddArg(x) 26356 return true 26357 } 26358 // match: (ORNshiftRA x (SRAconst x [c]) [d]) 26359 // cond: c==d 26360 // result: (MOVDconst [-1]) 26361 for { 26362 d := v.AuxInt 26363 _ = v.Args[1] 26364 x := v.Args[0] 26365 v_1 := v.Args[1] 26366 if v_1.Op != OpARM64SRAconst { 26367 
break 26368 } 26369 c := v_1.AuxInt 26370 if x != v_1.Args[0] { 26371 break 26372 } 26373 if !(c == d) { 26374 break 26375 } 26376 v.reset(OpARM64MOVDconst) 26377 v.AuxInt = -1 26378 return true 26379 } 26380 return false 26381 } 26382 func rewriteValueARM64_OpARM64ORNshiftRL_0(v *Value) bool { 26383 // match: (ORNshiftRL x (MOVDconst [c]) [d]) 26384 // cond: 26385 // result: (ORconst x [^int64(uint64(c)>>uint64(d))]) 26386 for { 26387 d := v.AuxInt 26388 _ = v.Args[1] 26389 x := v.Args[0] 26390 v_1 := v.Args[1] 26391 if v_1.Op != OpARM64MOVDconst { 26392 break 26393 } 26394 c := v_1.AuxInt 26395 v.reset(OpARM64ORconst) 26396 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 26397 v.AddArg(x) 26398 return true 26399 } 26400 // match: (ORNshiftRL x (SRLconst x [c]) [d]) 26401 // cond: c==d 26402 // result: (MOVDconst [-1]) 26403 for { 26404 d := v.AuxInt 26405 _ = v.Args[1] 26406 x := v.Args[0] 26407 v_1 := v.Args[1] 26408 if v_1.Op != OpARM64SRLconst { 26409 break 26410 } 26411 c := v_1.AuxInt 26412 if x != v_1.Args[0] { 26413 break 26414 } 26415 if !(c == d) { 26416 break 26417 } 26418 v.reset(OpARM64MOVDconst) 26419 v.AuxInt = -1 26420 return true 26421 } 26422 return false 26423 } 26424 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool { 26425 // match: (ORconst [0] x) 26426 // cond: 26427 // result: x 26428 for { 26429 if v.AuxInt != 0 { 26430 break 26431 } 26432 x := v.Args[0] 26433 v.reset(OpCopy) 26434 v.Type = x.Type 26435 v.AddArg(x) 26436 return true 26437 } 26438 // match: (ORconst [-1] _) 26439 // cond: 26440 // result: (MOVDconst [-1]) 26441 for { 26442 if v.AuxInt != -1 { 26443 break 26444 } 26445 v.reset(OpARM64MOVDconst) 26446 v.AuxInt = -1 26447 return true 26448 } 26449 // match: (ORconst [c] (MOVDconst [d])) 26450 // cond: 26451 // result: (MOVDconst [c|d]) 26452 for { 26453 c := v.AuxInt 26454 v_0 := v.Args[0] 26455 if v_0.Op != OpARM64MOVDconst { 26456 break 26457 } 26458 d := v_0.AuxInt 26459 v.reset(OpARM64MOVDconst) 26460 v.AuxInt = c | d 26461 
return true 26462 } 26463 // match: (ORconst [c] (ORconst [d] x)) 26464 // cond: 26465 // result: (ORconst [c|d] x) 26466 for { 26467 c := v.AuxInt 26468 v_0 := v.Args[0] 26469 if v_0.Op != OpARM64ORconst { 26470 break 26471 } 26472 d := v_0.AuxInt 26473 x := v_0.Args[0] 26474 v.reset(OpARM64ORconst) 26475 v.AuxInt = c | d 26476 v.AddArg(x) 26477 return true 26478 } 26479 // match: (ORconst [c1] (ANDconst [c2] x)) 26480 // cond: c2|c1 == ^0 26481 // result: (ORconst [c1] x) 26482 for { 26483 c1 := v.AuxInt 26484 v_0 := v.Args[0] 26485 if v_0.Op != OpARM64ANDconst { 26486 break 26487 } 26488 c2 := v_0.AuxInt 26489 x := v_0.Args[0] 26490 if !(c2|c1 == ^0) { 26491 break 26492 } 26493 v.reset(OpARM64ORconst) 26494 v.AuxInt = c1 26495 v.AddArg(x) 26496 return true 26497 } 26498 return false 26499 } 26500 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool { 26501 b := v.Block 26502 _ = b 26503 // match: (ORshiftLL (MOVDconst [c]) x [d]) 26504 // cond: 26505 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 26506 for { 26507 d := v.AuxInt 26508 _ = v.Args[1] 26509 v_0 := v.Args[0] 26510 if v_0.Op != OpARM64MOVDconst { 26511 break 26512 } 26513 c := v_0.AuxInt 26514 x := v.Args[1] 26515 v.reset(OpARM64ORconst) 26516 v.AuxInt = c 26517 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 26518 v0.AuxInt = d 26519 v0.AddArg(x) 26520 v.AddArg(v0) 26521 return true 26522 } 26523 // match: (ORshiftLL x (MOVDconst [c]) [d]) 26524 // cond: 26525 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 26526 for { 26527 d := v.AuxInt 26528 _ = v.Args[1] 26529 x := v.Args[0] 26530 v_1 := v.Args[1] 26531 if v_1.Op != OpARM64MOVDconst { 26532 break 26533 } 26534 c := v_1.AuxInt 26535 v.reset(OpARM64ORconst) 26536 v.AuxInt = int64(uint64(c) << uint64(d)) 26537 v.AddArg(x) 26538 return true 26539 } 26540 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 26541 // cond: c==d 26542 // result: y 26543 for { 26544 d := v.AuxInt 26545 _ = v.Args[1] 26546 x := v.Args[0] 26547 y := v.Args[1] 
26548 if y.Op != OpARM64SLLconst { 26549 break 26550 } 26551 c := y.AuxInt 26552 if x != y.Args[0] { 26553 break 26554 } 26555 if !(c == d) { 26556 break 26557 } 26558 v.reset(OpCopy) 26559 v.Type = y.Type 26560 v.AddArg(y) 26561 return true 26562 } 26563 // match: (ORshiftLL [c] (SRLconst x [64-c]) x) 26564 // cond: 26565 // result: (RORconst [64-c] x) 26566 for { 26567 c := v.AuxInt 26568 _ = v.Args[1] 26569 v_0 := v.Args[0] 26570 if v_0.Op != OpARM64SRLconst { 26571 break 26572 } 26573 if v_0.AuxInt != 64-c { 26574 break 26575 } 26576 x := v_0.Args[0] 26577 if x != v.Args[1] { 26578 break 26579 } 26580 v.reset(OpARM64RORconst) 26581 v.AuxInt = 64 - c 26582 v.AddArg(x) 26583 return true 26584 } 26585 // match: (ORshiftLL <t> [c] (UBFX [bfc] x) x) 26586 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 26587 // result: (RORWconst [32-c] x) 26588 for { 26589 t := v.Type 26590 c := v.AuxInt 26591 _ = v.Args[1] 26592 v_0 := v.Args[0] 26593 if v_0.Op != OpARM64UBFX { 26594 break 26595 } 26596 bfc := v_0.AuxInt 26597 x := v_0.Args[0] 26598 if x != v.Args[1] { 26599 break 26600 } 26601 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 26602 break 26603 } 26604 v.reset(OpARM64RORWconst) 26605 v.AuxInt = 32 - c 26606 v.AddArg(x) 26607 return true 26608 } 26609 // match: (ORshiftLL [c] (SRLconst x [64-c]) x2) 26610 // cond: 26611 // result: (EXTRconst [64-c] x2 x) 26612 for { 26613 c := v.AuxInt 26614 _ = v.Args[1] 26615 v_0 := v.Args[0] 26616 if v_0.Op != OpARM64SRLconst { 26617 break 26618 } 26619 if v_0.AuxInt != 64-c { 26620 break 26621 } 26622 x := v_0.Args[0] 26623 x2 := v.Args[1] 26624 v.reset(OpARM64EXTRconst) 26625 v.AuxInt = 64 - c 26626 v.AddArg(x2) 26627 v.AddArg(x) 26628 return true 26629 } 26630 // match: (ORshiftLL <t> [c] (UBFX [bfc] x) x2) 26631 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 26632 // result: (EXTRWconst [32-c] x2 x) 26633 for { 26634 t := v.Type 26635 c := v.AuxInt 26636 _ = v.Args[1] 26637 
v_0 := v.Args[0] 26638 if v_0.Op != OpARM64UBFX { 26639 break 26640 } 26641 bfc := v_0.AuxInt 26642 x := v_0.Args[0] 26643 x2 := v.Args[1] 26644 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 26645 break 26646 } 26647 v.reset(OpARM64EXTRWconst) 26648 v.AuxInt = 32 - c 26649 v.AddArg(x2) 26650 v.AddArg(x) 26651 return true 26652 } 26653 // match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y)) 26654 // cond: sc == getARM64BFwidth(bfc) 26655 // result: (BFXIL [bfc] y x) 26656 for { 26657 sc := v.AuxInt 26658 _ = v.Args[1] 26659 v_0 := v.Args[0] 26660 if v_0.Op != OpARM64UBFX { 26661 break 26662 } 26663 bfc := v_0.AuxInt 26664 x := v_0.Args[0] 26665 v_1 := v.Args[1] 26666 if v_1.Op != OpARM64SRLconst { 26667 break 26668 } 26669 if v_1.AuxInt != sc { 26670 break 26671 } 26672 y := v_1.Args[0] 26673 if !(sc == getARM64BFwidth(bfc)) { 26674 break 26675 } 26676 v.reset(OpARM64BFXIL) 26677 v.AuxInt = bfc 26678 v.AddArg(y) 26679 v.AddArg(x) 26680 return true 26681 } 26682 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) 26683 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 26684 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 26685 for { 26686 t := v.Type 26687 if v.AuxInt != 8 { 26688 break 26689 } 26690 _ = v.Args[1] 26691 y0 := v.Args[0] 26692 if y0.Op != OpARM64MOVDnop { 26693 break 26694 } 26695 x0 := y0.Args[0] 26696 if x0.Op != OpARM64MOVBUload { 26697 break 26698 } 26699 i0 := x0.AuxInt 26700 s := x0.Aux 26701 _ = x0.Args[1] 26702 p := x0.Args[0] 26703 mem := x0.Args[1] 26704 y1 := v.Args[1] 26705 if y1.Op != OpARM64MOVDnop { 26706 break 26707 } 26708 x1 := y1.Args[0] 26709 if x1.Op != OpARM64MOVBUload { 26710 break 26711 } 26712 i1 := x1.AuxInt 26713 if x1.Aux != s { 26714 break 26715 } 26716 _ = x1.Args[1] 26717 if p != 
x1.Args[0] { 26718 break 26719 } 26720 if mem != x1.Args[1] { 26721 break 26722 } 26723 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 26724 break 26725 } 26726 b = mergePoint(b, x0, x1) 26727 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 26728 v.reset(OpCopy) 26729 v.AddArg(v0) 26730 v0.Aux = s 26731 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 26732 v1.AuxInt = i0 26733 v1.AddArg(p) 26734 v0.AddArg(v1) 26735 v0.AddArg(mem) 26736 return true 26737 } 26738 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) 26739 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 26740 // result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr0 idx0 mem) 26741 for { 26742 t := v.Type 26743 if v.AuxInt != 8 { 26744 break 26745 } 26746 _ = v.Args[1] 26747 y0 := v.Args[0] 26748 if y0.Op != OpARM64MOVDnop { 26749 break 26750 } 26751 x0 := y0.Args[0] 26752 if x0.Op != OpARM64MOVBUloadidx { 26753 break 26754 } 26755 _ = x0.Args[2] 26756 ptr0 := x0.Args[0] 26757 idx0 := x0.Args[1] 26758 mem := x0.Args[2] 26759 y1 := v.Args[1] 26760 if y1.Op != OpARM64MOVDnop { 26761 break 26762 } 26763 x1 := y1.Args[0] 26764 if x1.Op != OpARM64MOVBUload { 26765 break 26766 } 26767 if x1.AuxInt != 1 { 26768 break 26769 } 26770 s := x1.Aux 26771 _ = x1.Args[1] 26772 p1 := x1.Args[0] 26773 if p1.Op != OpARM64ADD { 26774 break 26775 } 26776 _ = p1.Args[1] 26777 ptr1 := p1.Args[0] 26778 idx1 := p1.Args[1] 26779 if mem != x1.Args[1] { 26780 break 26781 } 26782 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) 
&& isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 26783 break 26784 } 26785 b = mergePoint(b, x0, x1) 26786 v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 26787 v.reset(OpCopy) 26788 v.AddArg(v0) 26789 v0.AddArg(ptr0) 26790 v0.AddArg(idx0) 26791 v0.AddArg(mem) 26792 return true 26793 } 26794 return false 26795 } 26796 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool { 26797 b := v.Block 26798 _ = b 26799 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) 26800 // cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 26801 // result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr idx mem) 26802 for { 26803 t := v.Type 26804 if v.AuxInt != 8 { 26805 break 26806 } 26807 _ = v.Args[1] 26808 y0 := v.Args[0] 26809 if y0.Op != OpARM64MOVDnop { 26810 break 26811 } 26812 x0 := y0.Args[0] 26813 if x0.Op != OpARM64MOVBUloadidx { 26814 break 26815 } 26816 _ = x0.Args[2] 26817 ptr := x0.Args[0] 26818 idx := x0.Args[1] 26819 mem := x0.Args[2] 26820 y1 := v.Args[1] 26821 if y1.Op != OpARM64MOVDnop { 26822 break 26823 } 26824 x1 := y1.Args[0] 26825 if x1.Op != OpARM64MOVBUloadidx { 26826 break 26827 } 26828 _ = x1.Args[2] 26829 if ptr != x1.Args[0] { 26830 break 26831 } 26832 x1_1 := x1.Args[1] 26833 if x1_1.Op != OpARM64ADDconst { 26834 break 26835 } 26836 if x1_1.AuxInt != 1 { 26837 break 26838 } 26839 if idx != x1_1.Args[0] { 26840 break 26841 } 26842 if mem != x1.Args[2] { 26843 break 26844 } 26845 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 26846 break 26847 } 26848 b = mergePoint(b, x0, x1) 26849 v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 26850 v.reset(OpCopy) 26851 v.AddArg(v0) 
26852 v0.AddArg(ptr) 26853 v0.AddArg(idx) 26854 v0.AddArg(mem) 26855 return true 26856 } 26857 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 26858 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 26859 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 26860 for { 26861 t := v.Type 26862 if v.AuxInt != 24 { 26863 break 26864 } 26865 _ = v.Args[1] 26866 o0 := v.Args[0] 26867 if o0.Op != OpARM64ORshiftLL { 26868 break 26869 } 26870 if o0.AuxInt != 16 { 26871 break 26872 } 26873 _ = o0.Args[1] 26874 x0 := o0.Args[0] 26875 if x0.Op != OpARM64MOVHUload { 26876 break 26877 } 26878 i0 := x0.AuxInt 26879 s := x0.Aux 26880 _ = x0.Args[1] 26881 p := x0.Args[0] 26882 mem := x0.Args[1] 26883 y1 := o0.Args[1] 26884 if y1.Op != OpARM64MOVDnop { 26885 break 26886 } 26887 x1 := y1.Args[0] 26888 if x1.Op != OpARM64MOVBUload { 26889 break 26890 } 26891 i2 := x1.AuxInt 26892 if x1.Aux != s { 26893 break 26894 } 26895 _ = x1.Args[1] 26896 if p != x1.Args[0] { 26897 break 26898 } 26899 if mem != x1.Args[1] { 26900 break 26901 } 26902 y2 := v.Args[1] 26903 if y2.Op != OpARM64MOVDnop { 26904 break 26905 } 26906 x2 := y2.Args[0] 26907 if x2.Op != OpARM64MOVBUload { 26908 break 26909 } 26910 i3 := x2.AuxInt 26911 if x2.Aux != s { 26912 break 26913 } 26914 _ = x2.Args[1] 26915 if p != x2.Args[0] { 26916 break 26917 } 26918 if mem != x2.Args[1] { 26919 break 26920 } 26921 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 26922 break 26923 
} 26924 b = mergePoint(b, x0, x1, x2) 26925 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 26926 v.reset(OpCopy) 26927 v.AddArg(v0) 26928 v0.Aux = s 26929 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 26930 v1.AuxInt = i0 26931 v1.AddArg(p) 26932 v0.AddArg(v1) 26933 v0.AddArg(mem) 26934 return true 26935 } 26936 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 26937 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 26938 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 idx0 mem) 26939 for { 26940 t := v.Type 26941 if v.AuxInt != 24 { 26942 break 26943 } 26944 _ = v.Args[1] 26945 o0 := v.Args[0] 26946 if o0.Op != OpARM64ORshiftLL { 26947 break 26948 } 26949 if o0.AuxInt != 16 { 26950 break 26951 } 26952 _ = o0.Args[1] 26953 x0 := o0.Args[0] 26954 if x0.Op != OpARM64MOVHUloadidx { 26955 break 26956 } 26957 _ = x0.Args[2] 26958 ptr0 := x0.Args[0] 26959 idx0 := x0.Args[1] 26960 mem := x0.Args[2] 26961 y1 := o0.Args[1] 26962 if y1.Op != OpARM64MOVDnop { 26963 break 26964 } 26965 x1 := y1.Args[0] 26966 if x1.Op != OpARM64MOVBUload { 26967 break 26968 } 26969 if x1.AuxInt != 2 { 26970 break 26971 } 26972 s := x1.Aux 26973 _ = x1.Args[1] 26974 p1 := x1.Args[0] 26975 if p1.Op != OpARM64ADD { 26976 break 26977 } 26978 _ = p1.Args[1] 26979 ptr1 := p1.Args[0] 26980 idx1 := p1.Args[1] 26981 if mem != x1.Args[1] { 26982 break 26983 } 26984 y2 := v.Args[1] 26985 if y2.Op != OpARM64MOVDnop { 26986 break 26987 } 26988 x2 := y2.Args[0] 26989 if x2.Op != OpARM64MOVBUload { 26990 break 26991 } 26992 if x2.AuxInt != 3 { 26993 break 26994 } 26995 
if x2.Aux != s { 26996 break 26997 } 26998 _ = x2.Args[1] 26999 p := x2.Args[0] 27000 if mem != x2.Args[1] { 27001 break 27002 } 27003 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27004 break 27005 } 27006 b = mergePoint(b, x0, x1, x2) 27007 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 27008 v.reset(OpCopy) 27009 v.AddArg(v0) 27010 v0.AddArg(ptr0) 27011 v0.AddArg(idx0) 27012 v0.AddArg(mem) 27013 return true 27014 } 27015 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 27016 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 27017 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr idx mem) 27018 for { 27019 t := v.Type 27020 if v.AuxInt != 24 { 27021 break 27022 } 27023 _ = v.Args[1] 27024 o0 := v.Args[0] 27025 if o0.Op != OpARM64ORshiftLL { 27026 break 27027 } 27028 if o0.AuxInt != 16 { 27029 break 27030 } 27031 _ = o0.Args[1] 27032 x0 := o0.Args[0] 27033 if x0.Op != OpARM64MOVHUloadidx { 27034 break 27035 } 27036 _ = x0.Args[2] 27037 ptr := x0.Args[0] 27038 idx := x0.Args[1] 27039 mem := x0.Args[2] 27040 y1 := o0.Args[1] 27041 if y1.Op != OpARM64MOVDnop { 27042 break 27043 } 27044 x1 := y1.Args[0] 27045 if x1.Op != OpARM64MOVBUloadidx { 27046 break 27047 } 27048 _ = x1.Args[2] 27049 if ptr != x1.Args[0] { 27050 break 27051 } 27052 x1_1 := x1.Args[1] 27053 if x1_1.Op != OpARM64ADDconst { 27054 break 27055 } 27056 if x1_1.AuxInt 
!= 2 { 27057 break 27058 } 27059 if idx != x1_1.Args[0] { 27060 break 27061 } 27062 if mem != x1.Args[2] { 27063 break 27064 } 27065 y2 := v.Args[1] 27066 if y2.Op != OpARM64MOVDnop { 27067 break 27068 } 27069 x2 := y2.Args[0] 27070 if x2.Op != OpARM64MOVBUloadidx { 27071 break 27072 } 27073 _ = x2.Args[2] 27074 if ptr != x2.Args[0] { 27075 break 27076 } 27077 x2_1 := x2.Args[1] 27078 if x2_1.Op != OpARM64ADDconst { 27079 break 27080 } 27081 if x2_1.AuxInt != 3 { 27082 break 27083 } 27084 if idx != x2_1.Args[0] { 27085 break 27086 } 27087 if mem != x2.Args[2] { 27088 break 27089 } 27090 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27091 break 27092 } 27093 b = mergePoint(b, x0, x1, x2) 27094 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 27095 v.reset(OpCopy) 27096 v.AddArg(v0) 27097 v0.AddArg(ptr) 27098 v0.AddArg(idx) 27099 v0.AddArg(mem) 27100 return true 27101 } 27102 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx2 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADDshiftLL [1] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 27103 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 27104 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 (SLLconst <idx0.Type> [1] idx0) mem) 27105 for { 27106 t := v.Type 27107 if v.AuxInt != 24 { 27108 break 27109 } 27110 _ = v.Args[1] 27111 o0 := v.Args[0] 27112 if o0.Op != OpARM64ORshiftLL { 27113 break 27114 } 27115 if o0.AuxInt != 16 { 27116 break 27117 } 27118 _ = o0.Args[1] 27119 x0 := o0.Args[0] 27120 if x0.Op != OpARM64MOVHUloadidx2 { 27121 break 
27122 } 27123 _ = x0.Args[2] 27124 ptr0 := x0.Args[0] 27125 idx0 := x0.Args[1] 27126 mem := x0.Args[2] 27127 y1 := o0.Args[1] 27128 if y1.Op != OpARM64MOVDnop { 27129 break 27130 } 27131 x1 := y1.Args[0] 27132 if x1.Op != OpARM64MOVBUload { 27133 break 27134 } 27135 if x1.AuxInt != 2 { 27136 break 27137 } 27138 s := x1.Aux 27139 _ = x1.Args[1] 27140 p1 := x1.Args[0] 27141 if p1.Op != OpARM64ADDshiftLL { 27142 break 27143 } 27144 if p1.AuxInt != 1 { 27145 break 27146 } 27147 _ = p1.Args[1] 27148 ptr1 := p1.Args[0] 27149 idx1 := p1.Args[1] 27150 if mem != x1.Args[1] { 27151 break 27152 } 27153 y2 := v.Args[1] 27154 if y2.Op != OpARM64MOVDnop { 27155 break 27156 } 27157 x2 := y2.Args[0] 27158 if x2.Op != OpARM64MOVBUload { 27159 break 27160 } 27161 if x2.AuxInt != 3 { 27162 break 27163 } 27164 if x2.Aux != s { 27165 break 27166 } 27167 _ = x2.Args[1] 27168 p := x2.Args[0] 27169 if mem != x2.Args[1] { 27170 break 27171 } 27172 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27173 break 27174 } 27175 b = mergePoint(b, x0, x1, x2) 27176 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 27177 v.reset(OpCopy) 27178 v.AddArg(v0) 27179 v0.AddArg(ptr0) 27180 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, idx0.Type) 27181 v1.AuxInt = 1 27182 v1.AddArg(idx0) 27183 v0.AddArg(v1) 27184 v0.AddArg(mem) 27185 return true 27186 } 27187 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 27188 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses 
== 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27189 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 27190 for { 27191 t := v.Type 27192 if v.AuxInt != 56 { 27193 break 27194 } 27195 _ = v.Args[1] 27196 o0 := v.Args[0] 27197 if o0.Op != OpARM64ORshiftLL { 27198 break 27199 } 27200 if o0.AuxInt != 48 { 27201 break 27202 } 27203 _ = o0.Args[1] 27204 o1 := o0.Args[0] 27205 if o1.Op != OpARM64ORshiftLL { 27206 break 27207 } 27208 if o1.AuxInt != 40 { 27209 break 27210 } 27211 _ = o1.Args[1] 27212 o2 := o1.Args[0] 27213 if o2.Op != OpARM64ORshiftLL { 27214 break 27215 } 27216 if o2.AuxInt != 32 { 27217 break 27218 } 27219 _ = o2.Args[1] 27220 x0 := o2.Args[0] 27221 if x0.Op != OpARM64MOVWUload { 27222 break 27223 } 27224 i0 := x0.AuxInt 27225 s := x0.Aux 27226 _ = x0.Args[1] 27227 p := x0.Args[0] 27228 mem := x0.Args[1] 27229 y1 := o2.Args[1] 27230 if y1.Op != OpARM64MOVDnop { 27231 break 27232 } 27233 x1 := y1.Args[0] 27234 if x1.Op != OpARM64MOVBUload { 27235 break 27236 } 27237 i4 := x1.AuxInt 27238 if x1.Aux != s { 27239 break 27240 } 27241 _ = x1.Args[1] 27242 if p != x1.Args[0] { 27243 break 27244 } 27245 if mem != x1.Args[1] { 27246 break 27247 } 27248 y2 := o1.Args[1] 27249 if y2.Op != OpARM64MOVDnop { 27250 break 27251 } 27252 x2 := y2.Args[0] 27253 if x2.Op != OpARM64MOVBUload { 27254 break 27255 } 27256 i5 := x2.AuxInt 27257 if x2.Aux != s { 27258 break 27259 } 27260 _ = x2.Args[1] 27261 if p != x2.Args[0] { 27262 break 27263 } 27264 if mem != x2.Args[1] { 27265 break 27266 } 27267 y3 := o0.Args[1] 27268 if y3.Op != OpARM64MOVDnop { 27269 break 27270 } 27271 x3 := y3.Args[0] 27272 if x3.Op != 
OpARM64MOVBUload { 27273 break 27274 } 27275 i6 := x3.AuxInt 27276 if x3.Aux != s { 27277 break 27278 } 27279 _ = x3.Args[1] 27280 if p != x3.Args[0] { 27281 break 27282 } 27283 if mem != x3.Args[1] { 27284 break 27285 } 27286 y4 := v.Args[1] 27287 if y4.Op != OpARM64MOVDnop { 27288 break 27289 } 27290 x4 := y4.Args[0] 27291 if x4.Op != OpARM64MOVBUload { 27292 break 27293 } 27294 i7 := x4.AuxInt 27295 if x4.Aux != s { 27296 break 27297 } 27298 _ = x4.Args[1] 27299 if p != x4.Args[0] { 27300 break 27301 } 27302 if mem != x4.Args[1] { 27303 break 27304 } 27305 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27306 break 27307 } 27308 b = mergePoint(b, x0, x1, x2, x3, x4) 27309 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 27310 v.reset(OpCopy) 27311 v.AddArg(v0) 27312 v0.Aux = s 27313 v1 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 27314 v1.AuxInt = i0 27315 v1.AddArg(p) 27316 v0.AddArg(v1) 27317 v0.AddArg(mem) 27318 return true 27319 } 27320 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 27321 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || 
isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27322 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 idx0 mem) 27323 for { 27324 t := v.Type 27325 if v.AuxInt != 56 { 27326 break 27327 } 27328 _ = v.Args[1] 27329 o0 := v.Args[0] 27330 if o0.Op != OpARM64ORshiftLL { 27331 break 27332 } 27333 if o0.AuxInt != 48 { 27334 break 27335 } 27336 _ = o0.Args[1] 27337 o1 := o0.Args[0] 27338 if o1.Op != OpARM64ORshiftLL { 27339 break 27340 } 27341 if o1.AuxInt != 40 { 27342 break 27343 } 27344 _ = o1.Args[1] 27345 o2 := o1.Args[0] 27346 if o2.Op != OpARM64ORshiftLL { 27347 break 27348 } 27349 if o2.AuxInt != 32 { 27350 break 27351 } 27352 _ = o2.Args[1] 27353 x0 := o2.Args[0] 27354 if x0.Op != OpARM64MOVWUloadidx { 27355 break 27356 } 27357 _ = x0.Args[2] 27358 ptr0 := x0.Args[0] 27359 idx0 := x0.Args[1] 27360 mem := x0.Args[2] 27361 y1 := o2.Args[1] 27362 if y1.Op != OpARM64MOVDnop { 27363 break 27364 } 27365 x1 := y1.Args[0] 27366 if x1.Op != OpARM64MOVBUload { 27367 break 27368 } 27369 if x1.AuxInt != 4 { 27370 break 27371 } 27372 s := x1.Aux 27373 _ = x1.Args[1] 27374 p1 := x1.Args[0] 27375 if p1.Op != OpARM64ADD { 27376 break 27377 } 27378 _ = p1.Args[1] 27379 ptr1 := p1.Args[0] 27380 idx1 := p1.Args[1] 27381 if mem != x1.Args[1] { 27382 break 27383 } 27384 y2 := o1.Args[1] 27385 if y2.Op != OpARM64MOVDnop { 27386 break 27387 } 27388 x2 := y2.Args[0] 27389 if x2.Op != OpARM64MOVBUload { 27390 break 27391 } 27392 if x2.AuxInt != 5 { 27393 break 27394 } 27395 if x2.Aux != s { 27396 break 27397 } 27398 _ = x2.Args[1] 27399 p := x2.Args[0] 27400 if mem != x2.Args[1] { 27401 break 27402 } 27403 y3 := o0.Args[1] 27404 if y3.Op != OpARM64MOVDnop { 27405 break 27406 } 27407 x3 := y3.Args[0] 27408 if x3.Op != OpARM64MOVBUload { 27409 break 27410 } 27411 if x3.AuxInt != 
6 { 27412 break 27413 } 27414 if x3.Aux != s { 27415 break 27416 } 27417 _ = x3.Args[1] 27418 if p != x3.Args[0] { 27419 break 27420 } 27421 if mem != x3.Args[1] { 27422 break 27423 } 27424 y4 := v.Args[1] 27425 if y4.Op != OpARM64MOVDnop { 27426 break 27427 } 27428 x4 := y4.Args[0] 27429 if x4.Op != OpARM64MOVBUload { 27430 break 27431 } 27432 if x4.AuxInt != 7 { 27433 break 27434 } 27435 if x4.Aux != s { 27436 break 27437 } 27438 _ = x4.Args[1] 27439 if p != x4.Args[0] { 27440 break 27441 } 27442 if mem != x4.Args[1] { 27443 break 27444 } 27445 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27446 break 27447 } 27448 b = mergePoint(b, x0, x1, x2, x3, x4) 27449 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 27450 v.reset(OpCopy) 27451 v.AddArg(v0) 27452 v0.AddArg(ptr0) 27453 v0.AddArg(idx0) 27454 v0.AddArg(mem) 27455 return true 27456 } 27457 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx4 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADDshiftLL [2] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 27458 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && 
isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27459 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 (SLLconst <idx0.Type> [2] idx0) mem) 27460 for { 27461 t := v.Type 27462 if v.AuxInt != 56 { 27463 break 27464 } 27465 _ = v.Args[1] 27466 o0 := v.Args[0] 27467 if o0.Op != OpARM64ORshiftLL { 27468 break 27469 } 27470 if o0.AuxInt != 48 { 27471 break 27472 } 27473 _ = o0.Args[1] 27474 o1 := o0.Args[0] 27475 if o1.Op != OpARM64ORshiftLL { 27476 break 27477 } 27478 if o1.AuxInt != 40 { 27479 break 27480 } 27481 _ = o1.Args[1] 27482 o2 := o1.Args[0] 27483 if o2.Op != OpARM64ORshiftLL { 27484 break 27485 } 27486 if o2.AuxInt != 32 { 27487 break 27488 } 27489 _ = o2.Args[1] 27490 x0 := o2.Args[0] 27491 if x0.Op != OpARM64MOVWUloadidx4 { 27492 break 27493 } 27494 _ = x0.Args[2] 27495 ptr0 := x0.Args[0] 27496 idx0 := x0.Args[1] 27497 mem := x0.Args[2] 27498 y1 := o2.Args[1] 27499 if y1.Op != OpARM64MOVDnop { 27500 break 27501 } 27502 x1 := y1.Args[0] 27503 if x1.Op != OpARM64MOVBUload { 27504 break 27505 } 27506 if x1.AuxInt != 4 { 27507 break 27508 } 27509 s := x1.Aux 27510 _ = x1.Args[1] 27511 p1 := x1.Args[0] 27512 if p1.Op != OpARM64ADDshiftLL { 27513 break 27514 } 27515 if p1.AuxInt != 2 { 27516 break 27517 } 27518 _ = p1.Args[1] 27519 ptr1 := p1.Args[0] 27520 idx1 := p1.Args[1] 27521 if mem != x1.Args[1] { 27522 break 27523 } 27524 y2 := o1.Args[1] 27525 if y2.Op != OpARM64MOVDnop { 27526 break 27527 } 27528 x2 := y2.Args[0] 27529 if x2.Op != OpARM64MOVBUload { 27530 break 27531 } 27532 if x2.AuxInt != 5 { 27533 break 27534 } 27535 if x2.Aux != s { 27536 break 27537 } 27538 _ = x2.Args[1] 27539 p := x2.Args[0] 27540 if mem != x2.Args[1] { 27541 break 27542 } 27543 y3 := o0.Args[1] 27544 if y3.Op != OpARM64MOVDnop { 27545 break 27546 } 27547 x3 := y3.Args[0] 27548 if x3.Op != OpARM64MOVBUload { 27549 break 
27550 } 27551 if x3.AuxInt != 6 { 27552 break 27553 } 27554 if x3.Aux != s { 27555 break 27556 } 27557 _ = x3.Args[1] 27558 if p != x3.Args[0] { 27559 break 27560 } 27561 if mem != x3.Args[1] { 27562 break 27563 } 27564 y4 := v.Args[1] 27565 if y4.Op != OpARM64MOVDnop { 27566 break 27567 } 27568 x4 := y4.Args[0] 27569 if x4.Op != OpARM64MOVBUload { 27570 break 27571 } 27572 if x4.AuxInt != 7 { 27573 break 27574 } 27575 if x4.Aux != s { 27576 break 27577 } 27578 _ = x4.Args[1] 27579 if p != x4.Args[0] { 27580 break 27581 } 27582 if mem != x4.Args[1] { 27583 break 27584 } 27585 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27586 break 27587 } 27588 b = mergePoint(b, x0, x1, x2, x3, x4) 27589 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 27590 v.reset(OpCopy) 27591 v.AddArg(v0) 27592 v0.AddArg(ptr0) 27593 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, idx0.Type) 27594 v1.AuxInt = 2 27595 v1.AddArg(idx0) 27596 v0.AddArg(v1) 27597 v0.AddArg(mem) 27598 return true 27599 } 27600 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 27601 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 
&& mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27602 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr idx mem) 27603 for { 27604 t := v.Type 27605 if v.AuxInt != 56 { 27606 break 27607 } 27608 _ = v.Args[1] 27609 o0 := v.Args[0] 27610 if o0.Op != OpARM64ORshiftLL { 27611 break 27612 } 27613 if o0.AuxInt != 48 { 27614 break 27615 } 27616 _ = o0.Args[1] 27617 o1 := o0.Args[0] 27618 if o1.Op != OpARM64ORshiftLL { 27619 break 27620 } 27621 if o1.AuxInt != 40 { 27622 break 27623 } 27624 _ = o1.Args[1] 27625 o2 := o1.Args[0] 27626 if o2.Op != OpARM64ORshiftLL { 27627 break 27628 } 27629 if o2.AuxInt != 32 { 27630 break 27631 } 27632 _ = o2.Args[1] 27633 x0 := o2.Args[0] 27634 if x0.Op != OpARM64MOVWUloadidx { 27635 break 27636 } 27637 _ = x0.Args[2] 27638 ptr := x0.Args[0] 27639 idx := x0.Args[1] 27640 mem := x0.Args[2] 27641 y1 := o2.Args[1] 27642 if y1.Op != OpARM64MOVDnop { 27643 break 27644 } 27645 x1 := y1.Args[0] 27646 if x1.Op != OpARM64MOVBUloadidx { 27647 break 27648 } 27649 _ = x1.Args[2] 27650 if ptr != x1.Args[0] { 27651 break 27652 } 27653 x1_1 := x1.Args[1] 27654 if x1_1.Op != OpARM64ADDconst { 27655 break 27656 } 27657 if x1_1.AuxInt != 4 { 27658 break 27659 } 27660 if idx != x1_1.Args[0] { 27661 break 27662 } 27663 if mem != x1.Args[2] { 27664 break 27665 } 27666 y2 := o1.Args[1] 27667 if y2.Op != OpARM64MOVDnop { 27668 break 27669 } 27670 x2 := y2.Args[0] 27671 if x2.Op != OpARM64MOVBUloadidx { 27672 break 27673 } 27674 _ = x2.Args[2] 27675 if ptr != x2.Args[0] { 27676 break 27677 } 27678 x2_1 := x2.Args[1] 27679 if x2_1.Op != OpARM64ADDconst { 27680 break 27681 } 27682 if x2_1.AuxInt != 5 { 27683 break 27684 } 27685 if idx != x2_1.Args[0] { 27686 break 27687 } 27688 if mem != x2.Args[2] { 27689 break 27690 } 27691 y3 := o0.Args[1] 27692 if y3.Op != OpARM64MOVDnop { 27693 
break 27694 } 27695 x3 := y3.Args[0] 27696 if x3.Op != OpARM64MOVBUloadidx { 27697 break 27698 } 27699 _ = x3.Args[2] 27700 if ptr != x3.Args[0] { 27701 break 27702 } 27703 x3_1 := x3.Args[1] 27704 if x3_1.Op != OpARM64ADDconst { 27705 break 27706 } 27707 if x3_1.AuxInt != 6 { 27708 break 27709 } 27710 if idx != x3_1.Args[0] { 27711 break 27712 } 27713 if mem != x3.Args[2] { 27714 break 27715 } 27716 y4 := v.Args[1] 27717 if y4.Op != OpARM64MOVDnop { 27718 break 27719 } 27720 x4 := y4.Args[0] 27721 if x4.Op != OpARM64MOVBUloadidx { 27722 break 27723 } 27724 _ = x4.Args[2] 27725 if ptr != x4.Args[0] { 27726 break 27727 } 27728 x4_1 := x4.Args[1] 27729 if x4_1.Op != OpARM64ADDconst { 27730 break 27731 } 27732 if x4_1.AuxInt != 7 { 27733 break 27734 } 27735 if idx != x4_1.Args[0] { 27736 break 27737 } 27738 if mem != x4.Args[2] { 27739 break 27740 } 27741 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27742 break 27743 } 27744 b = mergePoint(b, x0, x1, x2, x3, x4) 27745 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 27746 v.reset(OpCopy) 27747 v.AddArg(v0) 27748 v0.AddArg(ptr) 27749 v0.AddArg(idx) 27750 v0.AddArg(mem) 27751 return true 27752 } 27753 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 27754 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27755 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 27756 for { 27757 t := v.Type 27758 if v.AuxInt != 8 { 27759 break 27760 } 27761 
_ = v.Args[1] 27762 y0 := v.Args[0] 27763 if y0.Op != OpARM64MOVDnop { 27764 break 27765 } 27766 x0 := y0.Args[0] 27767 if x0.Op != OpARM64MOVBUload { 27768 break 27769 } 27770 i1 := x0.AuxInt 27771 s := x0.Aux 27772 _ = x0.Args[1] 27773 p := x0.Args[0] 27774 mem := x0.Args[1] 27775 y1 := v.Args[1] 27776 if y1.Op != OpARM64MOVDnop { 27777 break 27778 } 27779 x1 := y1.Args[0] 27780 if x1.Op != OpARM64MOVBUload { 27781 break 27782 } 27783 i0 := x1.AuxInt 27784 if x1.Aux != s { 27785 break 27786 } 27787 _ = x1.Args[1] 27788 if p != x1.Args[0] { 27789 break 27790 } 27791 if mem != x1.Args[1] { 27792 break 27793 } 27794 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27795 break 27796 } 27797 b = mergePoint(b, x0, x1) 27798 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 27799 v.reset(OpCopy) 27800 v.AddArg(v0) 27801 v1 := b.NewValue0(v.Pos, OpARM64MOVHUload, t) 27802 v1.AuxInt = i0 27803 v1.Aux = s 27804 v1.AddArg(p) 27805 v1.AddArg(mem) 27806 v0.AddArg(v1) 27807 return true 27808 } 27809 return false 27810 } 27811 func rewriteValueARM64_OpARM64ORshiftLL_20(v *Value) bool { 27812 b := v.Block 27813 _ = b 27814 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr0 idx0 mem))) 27815 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27816 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr0 idx0 mem)) 27817 for { 27818 t := v.Type 27819 if v.AuxInt != 8 { 27820 break 27821 } 27822 _ = v.Args[1] 27823 y0 := v.Args[0] 27824 if y0.Op != OpARM64MOVDnop { 27825 break 27826 } 27827 x0 := y0.Args[0] 27828 if x0.Op != OpARM64MOVBUload { 27829 break 27830 } 
27831 if x0.AuxInt != 1 { 27832 break 27833 } 27834 s := x0.Aux 27835 _ = x0.Args[1] 27836 p1 := x0.Args[0] 27837 if p1.Op != OpARM64ADD { 27838 break 27839 } 27840 _ = p1.Args[1] 27841 ptr1 := p1.Args[0] 27842 idx1 := p1.Args[1] 27843 mem := x0.Args[1] 27844 y1 := v.Args[1] 27845 if y1.Op != OpARM64MOVDnop { 27846 break 27847 } 27848 x1 := y1.Args[0] 27849 if x1.Op != OpARM64MOVBUloadidx { 27850 break 27851 } 27852 _ = x1.Args[2] 27853 ptr0 := x1.Args[0] 27854 idx0 := x1.Args[1] 27855 if mem != x1.Args[2] { 27856 break 27857 } 27858 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27859 break 27860 } 27861 b = mergePoint(b, x0, x1) 27862 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 27863 v.reset(OpCopy) 27864 v.AddArg(v0) 27865 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 27866 v1.AddArg(ptr0) 27867 v1.AddArg(idx0) 27868 v1.AddArg(mem) 27869 v0.AddArg(v1) 27870 return true 27871 } 27872 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [1] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr idx mem))) 27873 // cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27874 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr idx mem)) 27875 for { 27876 t := v.Type 27877 if v.AuxInt != 8 { 27878 break 27879 } 27880 _ = v.Args[1] 27881 y0 := v.Args[0] 27882 if y0.Op != OpARM64MOVDnop { 27883 break 27884 } 27885 x0 := y0.Args[0] 27886 if x0.Op != OpARM64MOVBUloadidx { 27887 break 27888 } 27889 _ = x0.Args[2] 27890 ptr := x0.Args[0] 27891 x0_1 := x0.Args[1] 27892 if x0_1.Op != OpARM64ADDconst { 27893 break 27894 } 27895 if x0_1.AuxInt != 1 { 27896 break 27897 } 27898 idx := x0_1.Args[0] 27899 mem := x0.Args[2] 27900 y1 
:= v.Args[1] 27901 if y1.Op != OpARM64MOVDnop { 27902 break 27903 } 27904 x1 := y1.Args[0] 27905 if x1.Op != OpARM64MOVBUloadidx { 27906 break 27907 } 27908 _ = x1.Args[2] 27909 if ptr != x1.Args[0] { 27910 break 27911 } 27912 if idx != x1.Args[1] { 27913 break 27914 } 27915 if mem != x1.Args[2] { 27916 break 27917 } 27918 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27919 break 27920 } 27921 b = mergePoint(b, x0, x1) 27922 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 27923 v.reset(OpCopy) 27924 v.AddArg(v0) 27925 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 27926 v1.AddArg(ptr) 27927 v1.AddArg(idx) 27928 v1.AddArg(mem) 27929 v0.AddArg(v1) 27930 return true 27931 } 27932 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 27933 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 27934 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 27935 for { 27936 t := v.Type 27937 if v.AuxInt != 24 { 27938 break 27939 } 27940 _ = v.Args[1] 27941 o0 := v.Args[0] 27942 if o0.Op != OpARM64ORshiftLL { 27943 break 27944 } 27945 if o0.AuxInt != 16 { 27946 break 27947 } 27948 _ = o0.Args[1] 27949 y0 := o0.Args[0] 27950 if y0.Op != OpARM64REV16W { 27951 break 27952 } 27953 x0 := y0.Args[0] 27954 if x0.Op != OpARM64MOVHUload { 27955 break 27956 } 27957 i2 := x0.AuxInt 27958 s := x0.Aux 27959 _ = x0.Args[1] 27960 p := x0.Args[0] 27961 mem := x0.Args[1] 27962 y1 := o0.Args[1] 27963 if y1.Op != OpARM64MOVDnop { 27964 break 27965 } 27966 x1 := y1.Args[0] 27967 if x1.Op != 
OpARM64MOVBUload { 27968 break 27969 } 27970 i1 := x1.AuxInt 27971 if x1.Aux != s { 27972 break 27973 } 27974 _ = x1.Args[1] 27975 if p != x1.Args[0] { 27976 break 27977 } 27978 if mem != x1.Args[1] { 27979 break 27980 } 27981 y2 := v.Args[1] 27982 if y2.Op != OpARM64MOVDnop { 27983 break 27984 } 27985 x2 := y2.Args[0] 27986 if x2.Op != OpARM64MOVBUload { 27987 break 27988 } 27989 i0 := x2.AuxInt 27990 if x2.Aux != s { 27991 break 27992 } 27993 _ = x2.Args[1] 27994 if p != x2.Args[0] { 27995 break 27996 } 27997 if mem != x2.Args[1] { 27998 break 27999 } 28000 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28001 break 28002 } 28003 b = mergePoint(b, x0, x1, x2) 28004 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 28005 v.reset(OpCopy) 28006 v.AddArg(v0) 28007 v1 := b.NewValue0(v.Pos, OpARM64MOVWUload, t) 28008 v1.Aux = s 28009 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 28010 v2.AuxInt = i0 28011 v2.AddArg(p) 28012 v1.AddArg(v2) 28013 v1.AddArg(mem) 28014 v0.AddArg(v1) 28015 return true 28016 } 28017 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr0 idx0 mem))) 28018 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 28019 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 28020 for { 28021 t := v.Type 28022 if v.AuxInt != 24 { 28023 break 28024 } 
28025 _ = v.Args[1] 28026 o0 := v.Args[0] 28027 if o0.Op != OpARM64ORshiftLL { 28028 break 28029 } 28030 if o0.AuxInt != 16 { 28031 break 28032 } 28033 _ = o0.Args[1] 28034 y0 := o0.Args[0] 28035 if y0.Op != OpARM64REV16W { 28036 break 28037 } 28038 x0 := y0.Args[0] 28039 if x0.Op != OpARM64MOVHUload { 28040 break 28041 } 28042 if x0.AuxInt != 2 { 28043 break 28044 } 28045 s := x0.Aux 28046 _ = x0.Args[1] 28047 p := x0.Args[0] 28048 mem := x0.Args[1] 28049 y1 := o0.Args[1] 28050 if y1.Op != OpARM64MOVDnop { 28051 break 28052 } 28053 x1 := y1.Args[0] 28054 if x1.Op != OpARM64MOVBUload { 28055 break 28056 } 28057 if x1.AuxInt != 1 { 28058 break 28059 } 28060 if x1.Aux != s { 28061 break 28062 } 28063 _ = x1.Args[1] 28064 p1 := x1.Args[0] 28065 if p1.Op != OpARM64ADD { 28066 break 28067 } 28068 _ = p1.Args[1] 28069 ptr1 := p1.Args[0] 28070 idx1 := p1.Args[1] 28071 if mem != x1.Args[1] { 28072 break 28073 } 28074 y2 := v.Args[1] 28075 if y2.Op != OpARM64MOVDnop { 28076 break 28077 } 28078 x2 := y2.Args[0] 28079 if x2.Op != OpARM64MOVBUloadidx { 28080 break 28081 } 28082 _ = x2.Args[2] 28083 ptr0 := x2.Args[0] 28084 idx0 := x2.Args[1] 28085 if mem != x2.Args[2] { 28086 break 28087 } 28088 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28089 break 28090 } 28091 b = mergePoint(b, x0, x1, x2) 28092 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 28093 v.reset(OpCopy) 28094 v.AddArg(v0) 28095 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 28096 v1.AddArg(ptr0) 28097 v1.AddArg(idx0) 28098 v1.AddArg(mem) 28099 v0.AddArg(v1) 28100 return true 28101 } 28102 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUloadidx ptr 
(ADDconst [2] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr idx mem))) 28103 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 28104 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 28105 for { 28106 t := v.Type 28107 if v.AuxInt != 24 { 28108 break 28109 } 28110 _ = v.Args[1] 28111 o0 := v.Args[0] 28112 if o0.Op != OpARM64ORshiftLL { 28113 break 28114 } 28115 if o0.AuxInt != 16 { 28116 break 28117 } 28118 _ = o0.Args[1] 28119 y0 := o0.Args[0] 28120 if y0.Op != OpARM64REV16W { 28121 break 28122 } 28123 x0 := y0.Args[0] 28124 if x0.Op != OpARM64MOVHUloadidx { 28125 break 28126 } 28127 _ = x0.Args[2] 28128 ptr := x0.Args[0] 28129 x0_1 := x0.Args[1] 28130 if x0_1.Op != OpARM64ADDconst { 28131 break 28132 } 28133 if x0_1.AuxInt != 2 { 28134 break 28135 } 28136 idx := x0_1.Args[0] 28137 mem := x0.Args[2] 28138 y1 := o0.Args[1] 28139 if y1.Op != OpARM64MOVDnop { 28140 break 28141 } 28142 x1 := y1.Args[0] 28143 if x1.Op != OpARM64MOVBUloadidx { 28144 break 28145 } 28146 _ = x1.Args[2] 28147 if ptr != x1.Args[0] { 28148 break 28149 } 28150 x1_1 := x1.Args[1] 28151 if x1_1.Op != OpARM64ADDconst { 28152 break 28153 } 28154 if x1_1.AuxInt != 1 { 28155 break 28156 } 28157 if idx != x1_1.Args[0] { 28158 break 28159 } 28160 if mem != x1.Args[2] { 28161 break 28162 } 28163 y2 := v.Args[1] 28164 if y2.Op != OpARM64MOVDnop { 28165 break 28166 } 28167 x2 := y2.Args[0] 28168 if x2.Op != OpARM64MOVBUloadidx { 28169 break 28170 } 28171 _ = x2.Args[2] 28172 if ptr != x2.Args[0] { 28173 break 28174 } 28175 if idx != x2.Args[1] { 28176 break 28177 } 28178 if mem != x2.Args[2] { 28179 break 28180 } 28181 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && 
o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28182 break 28183 } 28184 b = mergePoint(b, x0, x1, x2) 28185 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 28186 v.reset(OpCopy) 28187 v.AddArg(v0) 28188 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 28189 v1.AddArg(ptr) 28190 v1.AddArg(idx) 28191 v1.AddArg(mem) 28192 v0.AddArg(v1) 28193 return true 28194 } 28195 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 28196 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28197 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 28198 for { 28199 t := v.Type 28200 if v.AuxInt != 56 { 28201 break 28202 } 28203 _ = v.Args[1] 28204 o0 := v.Args[0] 28205 if o0.Op != OpARM64ORshiftLL { 28206 break 28207 } 28208 if o0.AuxInt != 48 { 28209 break 28210 } 28211 _ = o0.Args[1] 28212 o1 := o0.Args[0] 28213 if o1.Op != OpARM64ORshiftLL { 28214 break 28215 } 28216 if o1.AuxInt != 40 { 28217 break 28218 } 28219 _ = o1.Args[1] 28220 o2 := o1.Args[0] 28221 if o2.Op != OpARM64ORshiftLL { 28222 break 28223 } 28224 if o2.AuxInt != 32 { 28225 break 28226 } 28227 _ = o2.Args[1] 28228 y0 := o2.Args[0] 28229 if y0.Op != OpARM64REVW { 28230 break 
28231 } 28232 x0 := y0.Args[0] 28233 if x0.Op != OpARM64MOVWUload { 28234 break 28235 } 28236 i4 := x0.AuxInt 28237 s := x0.Aux 28238 _ = x0.Args[1] 28239 p := x0.Args[0] 28240 mem := x0.Args[1] 28241 y1 := o2.Args[1] 28242 if y1.Op != OpARM64MOVDnop { 28243 break 28244 } 28245 x1 := y1.Args[0] 28246 if x1.Op != OpARM64MOVBUload { 28247 break 28248 } 28249 i3 := x1.AuxInt 28250 if x1.Aux != s { 28251 break 28252 } 28253 _ = x1.Args[1] 28254 if p != x1.Args[0] { 28255 break 28256 } 28257 if mem != x1.Args[1] { 28258 break 28259 } 28260 y2 := o1.Args[1] 28261 if y2.Op != OpARM64MOVDnop { 28262 break 28263 } 28264 x2 := y2.Args[0] 28265 if x2.Op != OpARM64MOVBUload { 28266 break 28267 } 28268 i2 := x2.AuxInt 28269 if x2.Aux != s { 28270 break 28271 } 28272 _ = x2.Args[1] 28273 if p != x2.Args[0] { 28274 break 28275 } 28276 if mem != x2.Args[1] { 28277 break 28278 } 28279 y3 := o0.Args[1] 28280 if y3.Op != OpARM64MOVDnop { 28281 break 28282 } 28283 x3 := y3.Args[0] 28284 if x3.Op != OpARM64MOVBUload { 28285 break 28286 } 28287 i1 := x3.AuxInt 28288 if x3.Aux != s { 28289 break 28290 } 28291 _ = x3.Args[1] 28292 if p != x3.Args[0] { 28293 break 28294 } 28295 if mem != x3.Args[1] { 28296 break 28297 } 28298 y4 := v.Args[1] 28299 if y4.Op != OpARM64MOVDnop { 28300 break 28301 } 28302 x4 := y4.Args[0] 28303 if x4.Op != OpARM64MOVBUload { 28304 break 28305 } 28306 i0 := x4.AuxInt 28307 if x4.Aux != s { 28308 break 28309 } 28310 _ = x4.Args[1] 28311 if p != x4.Args[0] { 28312 break 28313 } 28314 if mem != x4.Args[1] { 28315 break 28316 } 28317 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && 
clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28318 break 28319 } 28320 b = mergePoint(b, x0, x1, x2, x3, x4) 28321 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 28322 v.reset(OpCopy) 28323 v.AddArg(v0) 28324 v1 := b.NewValue0(v.Pos, OpARM64MOVDload, t) 28325 v1.Aux = s 28326 v2 := b.NewValue0(v.Pos, OpOffPtr, p.Type) 28327 v2.AuxInt = i0 28328 v2.AddArg(p) 28329 v1.AddArg(v2) 28330 v1.AddArg(mem) 28331 v0.AddArg(v1) 28332 return true 28333 } 28334 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr0 idx0 mem))) 28335 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28336 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 28337 for { 28338 t := v.Type 28339 if v.AuxInt != 56 { 28340 break 28341 } 28342 _ = v.Args[1] 28343 o0 := v.Args[0] 28344 if o0.Op != OpARM64ORshiftLL { 28345 break 28346 } 28347 if o0.AuxInt != 48 { 28348 break 28349 } 28350 _ = o0.Args[1] 28351 o1 := o0.Args[0] 28352 if o1.Op != OpARM64ORshiftLL { 28353 break 28354 } 28355 if o1.AuxInt != 40 { 28356 break 28357 } 28358 _ = o1.Args[1] 28359 o2 := o1.Args[0] 28360 if o2.Op != OpARM64ORshiftLL { 28361 break 28362 } 28363 if o2.AuxInt != 32 { 28364 break 28365 } 28366 _ = 
o2.Args[1] 28367 y0 := o2.Args[0] 28368 if y0.Op != OpARM64REVW { 28369 break 28370 } 28371 x0 := y0.Args[0] 28372 if x0.Op != OpARM64MOVWUload { 28373 break 28374 } 28375 if x0.AuxInt != 4 { 28376 break 28377 } 28378 s := x0.Aux 28379 _ = x0.Args[1] 28380 p := x0.Args[0] 28381 mem := x0.Args[1] 28382 y1 := o2.Args[1] 28383 if y1.Op != OpARM64MOVDnop { 28384 break 28385 } 28386 x1 := y1.Args[0] 28387 if x1.Op != OpARM64MOVBUload { 28388 break 28389 } 28390 if x1.AuxInt != 3 { 28391 break 28392 } 28393 if x1.Aux != s { 28394 break 28395 } 28396 _ = x1.Args[1] 28397 if p != x1.Args[0] { 28398 break 28399 } 28400 if mem != x1.Args[1] { 28401 break 28402 } 28403 y2 := o1.Args[1] 28404 if y2.Op != OpARM64MOVDnop { 28405 break 28406 } 28407 x2 := y2.Args[0] 28408 if x2.Op != OpARM64MOVBUload { 28409 break 28410 } 28411 if x2.AuxInt != 2 { 28412 break 28413 } 28414 if x2.Aux != s { 28415 break 28416 } 28417 _ = x2.Args[1] 28418 if p != x2.Args[0] { 28419 break 28420 } 28421 if mem != x2.Args[1] { 28422 break 28423 } 28424 y3 := o0.Args[1] 28425 if y3.Op != OpARM64MOVDnop { 28426 break 28427 } 28428 x3 := y3.Args[0] 28429 if x3.Op != OpARM64MOVBUload { 28430 break 28431 } 28432 if x3.AuxInt != 1 { 28433 break 28434 } 28435 if x3.Aux != s { 28436 break 28437 } 28438 _ = x3.Args[1] 28439 p1 := x3.Args[0] 28440 if p1.Op != OpARM64ADD { 28441 break 28442 } 28443 _ = p1.Args[1] 28444 ptr1 := p1.Args[0] 28445 idx1 := p1.Args[1] 28446 if mem != x3.Args[1] { 28447 break 28448 } 28449 y4 := v.Args[1] 28450 if y4.Op != OpARM64MOVDnop { 28451 break 28452 } 28453 x4 := y4.Args[0] 28454 if x4.Op != OpARM64MOVBUloadidx { 28455 break 28456 } 28457 _ = x4.Args[2] 28458 ptr0 := x4.Args[0] 28459 idx0 := x4.Args[1] 28460 if mem != x4.Args[2] { 28461 break 28462 } 28463 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && 
mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28464 break 28465 } 28466 b = mergePoint(b, x0, x1, x2, x3, x4) 28467 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 28468 v.reset(OpCopy) 28469 v.AddArg(v0) 28470 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 28471 v1.AddArg(ptr0) 28472 v1.AddArg(idx0) 28473 v1.AddArg(mem) 28474 v0.AddArg(v1) 28475 return true 28476 } 28477 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUloadidx ptr (ADDconst [4] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr idx mem))) 28478 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28479 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr idx mem)) 28480 for { 28481 t := v.Type 28482 if v.AuxInt != 56 { 28483 break 28484 } 28485 _ = v.Args[1] 28486 o0 := v.Args[0] 28487 if o0.Op != OpARM64ORshiftLL { 28488 break 28489 } 28490 if o0.AuxInt != 48 { 28491 break 28492 } 28493 _ = o0.Args[1] 28494 o1 := o0.Args[0] 28495 if o1.Op != OpARM64ORshiftLL { 28496 break 28497 } 28498 if o1.AuxInt != 40 { 28499 break 28500 } 28501 _ = o1.Args[1] 28502 o2 := 
o1.Args[0] 28503 if o2.Op != OpARM64ORshiftLL { 28504 break 28505 } 28506 if o2.AuxInt != 32 { 28507 break 28508 } 28509 _ = o2.Args[1] 28510 y0 := o2.Args[0] 28511 if y0.Op != OpARM64REVW { 28512 break 28513 } 28514 x0 := y0.Args[0] 28515 if x0.Op != OpARM64MOVWUloadidx { 28516 break 28517 } 28518 _ = x0.Args[2] 28519 ptr := x0.Args[0] 28520 x0_1 := x0.Args[1] 28521 if x0_1.Op != OpARM64ADDconst { 28522 break 28523 } 28524 if x0_1.AuxInt != 4 { 28525 break 28526 } 28527 idx := x0_1.Args[0] 28528 mem := x0.Args[2] 28529 y1 := o2.Args[1] 28530 if y1.Op != OpARM64MOVDnop { 28531 break 28532 } 28533 x1 := y1.Args[0] 28534 if x1.Op != OpARM64MOVBUloadidx { 28535 break 28536 } 28537 _ = x1.Args[2] 28538 if ptr != x1.Args[0] { 28539 break 28540 } 28541 x1_1 := x1.Args[1] 28542 if x1_1.Op != OpARM64ADDconst { 28543 break 28544 } 28545 if x1_1.AuxInt != 3 { 28546 break 28547 } 28548 if idx != x1_1.Args[0] { 28549 break 28550 } 28551 if mem != x1.Args[2] { 28552 break 28553 } 28554 y2 := o1.Args[1] 28555 if y2.Op != OpARM64MOVDnop { 28556 break 28557 } 28558 x2 := y2.Args[0] 28559 if x2.Op != OpARM64MOVBUloadidx { 28560 break 28561 } 28562 _ = x2.Args[2] 28563 if ptr != x2.Args[0] { 28564 break 28565 } 28566 x2_1 := x2.Args[1] 28567 if x2_1.Op != OpARM64ADDconst { 28568 break 28569 } 28570 if x2_1.AuxInt != 2 { 28571 break 28572 } 28573 if idx != x2_1.Args[0] { 28574 break 28575 } 28576 if mem != x2.Args[2] { 28577 break 28578 } 28579 y3 := o0.Args[1] 28580 if y3.Op != OpARM64MOVDnop { 28581 break 28582 } 28583 x3 := y3.Args[0] 28584 if x3.Op != OpARM64MOVBUloadidx { 28585 break 28586 } 28587 _ = x3.Args[2] 28588 if ptr != x3.Args[0] { 28589 break 28590 } 28591 x3_1 := x3.Args[1] 28592 if x3_1.Op != OpARM64ADDconst { 28593 break 28594 } 28595 if x3_1.AuxInt != 1 { 28596 break 28597 } 28598 if idx != x3_1.Args[0] { 28599 break 28600 } 28601 if mem != x3.Args[2] { 28602 break 28603 } 28604 y4 := v.Args[1] 28605 if y4.Op != OpARM64MOVDnop { 28606 break 28607 } 28608 x4 := 
y4.Args[0] 28609 if x4.Op != OpARM64MOVBUloadidx { 28610 break 28611 } 28612 _ = x4.Args[2] 28613 if ptr != x4.Args[0] { 28614 break 28615 } 28616 if idx != x4.Args[1] { 28617 break 28618 } 28619 if mem != x4.Args[2] { 28620 break 28621 } 28622 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28623 break 28624 } 28625 b = mergePoint(b, x0, x1, x2, x3, x4) 28626 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 28627 v.reset(OpCopy) 28628 v.AddArg(v0) 28629 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 28630 v1.AddArg(ptr) 28631 v1.AddArg(idx) 28632 v1.AddArg(mem) 28633 v0.AddArg(v1) 28634 return true 28635 } 28636 return false 28637 } 28638 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 28639 b := v.Block 28640 _ = b 28641 // match: (ORshiftRA (MOVDconst [c]) x [d]) 28642 // cond: 28643 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 28644 for { 28645 d := v.AuxInt 28646 _ = v.Args[1] 28647 v_0 := v.Args[0] 28648 if v_0.Op != OpARM64MOVDconst { 28649 break 28650 } 28651 c := v_0.AuxInt 28652 x := v.Args[1] 28653 v.reset(OpARM64ORconst) 28654 v.AuxInt = c 28655 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 28656 v0.AuxInt = d 28657 v0.AddArg(x) 28658 v.AddArg(v0) 28659 return true 28660 } 28661 // match: (ORshiftRA x (MOVDconst [c]) [d]) 28662 // cond: 28663 // result: (ORconst x [c>>uint64(d)]) 28664 for { 28665 d := v.AuxInt 28666 _ = v.Args[1] 28667 x := v.Args[0] 28668 v_1 := v.Args[1] 28669 if v_1.Op != OpARM64MOVDconst { 28670 break 28671 } 28672 c := v_1.AuxInt 28673 v.reset(OpARM64ORconst) 28674 v.AuxInt = c >> uint64(d) 28675 v.AddArg(x) 28676 return 
true 28677 } 28678 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 28679 // cond: c==d 28680 // result: y 28681 for { 28682 d := v.AuxInt 28683 _ = v.Args[1] 28684 x := v.Args[0] 28685 y := v.Args[1] 28686 if y.Op != OpARM64SRAconst { 28687 break 28688 } 28689 c := y.AuxInt 28690 if x != y.Args[0] { 28691 break 28692 } 28693 if !(c == d) { 28694 break 28695 } 28696 v.reset(OpCopy) 28697 v.Type = y.Type 28698 v.AddArg(y) 28699 return true 28700 } 28701 return false 28702 } 28703 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 28704 b := v.Block 28705 _ = b 28706 // match: (ORshiftRL (MOVDconst [c]) x [d]) 28707 // cond: 28708 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 28709 for { 28710 d := v.AuxInt 28711 _ = v.Args[1] 28712 v_0 := v.Args[0] 28713 if v_0.Op != OpARM64MOVDconst { 28714 break 28715 } 28716 c := v_0.AuxInt 28717 x := v.Args[1] 28718 v.reset(OpARM64ORconst) 28719 v.AuxInt = c 28720 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 28721 v0.AuxInt = d 28722 v0.AddArg(x) 28723 v.AddArg(v0) 28724 return true 28725 } 28726 // match: (ORshiftRL x (MOVDconst [c]) [d]) 28727 // cond: 28728 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 28729 for { 28730 d := v.AuxInt 28731 _ = v.Args[1] 28732 x := v.Args[0] 28733 v_1 := v.Args[1] 28734 if v_1.Op != OpARM64MOVDconst { 28735 break 28736 } 28737 c := v_1.AuxInt 28738 v.reset(OpARM64ORconst) 28739 v.AuxInt = int64(uint64(c) >> uint64(d)) 28740 v.AddArg(x) 28741 return true 28742 } 28743 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 28744 // cond: c==d 28745 // result: y 28746 for { 28747 d := v.AuxInt 28748 _ = v.Args[1] 28749 x := v.Args[0] 28750 y := v.Args[1] 28751 if y.Op != OpARM64SRLconst { 28752 break 28753 } 28754 c := y.AuxInt 28755 if x != y.Args[0] { 28756 break 28757 } 28758 if !(c == d) { 28759 break 28760 } 28761 v.reset(OpCopy) 28762 v.Type = y.Type 28763 v.AddArg(y) 28764 return true 28765 } 28766 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 28767 // cond: 28768 // 
result: (RORconst [ c] x) 28769 for { 28770 c := v.AuxInt 28771 _ = v.Args[1] 28772 v_0 := v.Args[0] 28773 if v_0.Op != OpARM64SLLconst { 28774 break 28775 } 28776 if v_0.AuxInt != 64-c { 28777 break 28778 } 28779 x := v_0.Args[0] 28780 if x != v.Args[1] { 28781 break 28782 } 28783 v.reset(OpARM64RORconst) 28784 v.AuxInt = c 28785 v.AddArg(x) 28786 return true 28787 } 28788 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 28789 // cond: c < 32 && t.Size() == 4 28790 // result: (RORWconst [c] x) 28791 for { 28792 t := v.Type 28793 c := v.AuxInt 28794 _ = v.Args[1] 28795 v_0 := v.Args[0] 28796 if v_0.Op != OpARM64SLLconst { 28797 break 28798 } 28799 if v_0.AuxInt != 32-c { 28800 break 28801 } 28802 x := v_0.Args[0] 28803 v_1 := v.Args[1] 28804 if v_1.Op != OpARM64MOVWUreg { 28805 break 28806 } 28807 if x != v_1.Args[0] { 28808 break 28809 } 28810 if !(c < 32 && t.Size() == 4) { 28811 break 28812 } 28813 v.reset(OpARM64RORWconst) 28814 v.AuxInt = c 28815 v.AddArg(x) 28816 return true 28817 } 28818 // match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y)) 28819 // cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc)) 28820 // result: (BFI [arm64BFAuxInt(lc-rc, 64-lc)] x y) 28821 for { 28822 rc := v.AuxInt 28823 _ = v.Args[1] 28824 v_0 := v.Args[0] 28825 if v_0.Op != OpARM64ANDconst { 28826 break 28827 } 28828 ac := v_0.AuxInt 28829 x := v_0.Args[0] 28830 v_1 := v.Args[1] 28831 if v_1.Op != OpARM64SLLconst { 28832 break 28833 } 28834 lc := v_1.AuxInt 28835 y := v_1.Args[0] 28836 if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) { 28837 break 28838 } 28839 v.reset(OpARM64BFI) 28840 v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc) 28841 v.AddArg(x) 28842 v.AddArg(y) 28843 return true 28844 } 28845 return false 28846 } 28847 func rewriteValueARM64_OpARM64RORWconst_0(v *Value) bool { 28848 // match: (RORWconst [c] (RORWconst [d] x)) 28849 // cond: 28850 // result: (RORWconst [(c+d)&31] x) 28851 for { 28852 c := v.AuxInt 28853 v_0 := v.Args[0] 
28854 if v_0.Op != OpARM64RORWconst { 28855 break 28856 } 28857 d := v_0.AuxInt 28858 x := v_0.Args[0] 28859 v.reset(OpARM64RORWconst) 28860 v.AuxInt = (c + d) & 31 28861 v.AddArg(x) 28862 return true 28863 } 28864 return false 28865 } 28866 func rewriteValueARM64_OpARM64RORconst_0(v *Value) bool { 28867 // match: (RORconst [c] (RORconst [d] x)) 28868 // cond: 28869 // result: (RORconst [(c+d)&63] x) 28870 for { 28871 c := v.AuxInt 28872 v_0 := v.Args[0] 28873 if v_0.Op != OpARM64RORconst { 28874 break 28875 } 28876 d := v_0.AuxInt 28877 x := v_0.Args[0] 28878 v.reset(OpARM64RORconst) 28879 v.AuxInt = (c + d) & 63 28880 v.AddArg(x) 28881 return true 28882 } 28883 return false 28884 } 28885 func rewriteValueARM64_OpARM64SLL_0(v *Value) bool { 28886 // match: (SLL x (MOVDconst [c])) 28887 // cond: 28888 // result: (SLLconst x [c&63]) 28889 for { 28890 _ = v.Args[1] 28891 x := v.Args[0] 28892 v_1 := v.Args[1] 28893 if v_1.Op != OpARM64MOVDconst { 28894 break 28895 } 28896 c := v_1.AuxInt 28897 v.reset(OpARM64SLLconst) 28898 v.AuxInt = c & 63 28899 v.AddArg(x) 28900 return true 28901 } 28902 return false 28903 } 28904 func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool { 28905 // match: (SLLconst [c] (MOVDconst [d])) 28906 // cond: 28907 // result: (MOVDconst [d<<uint64(c)]) 28908 for { 28909 c := v.AuxInt 28910 v_0 := v.Args[0] 28911 if v_0.Op != OpARM64MOVDconst { 28912 break 28913 } 28914 d := v_0.AuxInt 28915 v.reset(OpARM64MOVDconst) 28916 v.AuxInt = d << uint64(c) 28917 return true 28918 } 28919 // match: (SLLconst [c] (SRLconst [c] x)) 28920 // cond: 0 < c && c < 64 28921 // result: (ANDconst [^(1<<uint(c)-1)] x) 28922 for { 28923 c := v.AuxInt 28924 v_0 := v.Args[0] 28925 if v_0.Op != OpARM64SRLconst { 28926 break 28927 } 28928 if v_0.AuxInt != c { 28929 break 28930 } 28931 x := v_0.Args[0] 28932 if !(0 < c && c < 64) { 28933 break 28934 } 28935 v.reset(OpARM64ANDconst) 28936 v.AuxInt = ^(1<<uint(c) - 1) 28937 v.AddArg(x) 28938 return true 28939 } 28940 // 
match: (SLLconst [sc] (ANDconst [ac] x)) 28941 // cond: isARM64BFMask(sc, ac, 0) 28942 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(ac, 0))] x) 28943 for { 28944 sc := v.AuxInt 28945 v_0 := v.Args[0] 28946 if v_0.Op != OpARM64ANDconst { 28947 break 28948 } 28949 ac := v_0.AuxInt 28950 x := v_0.Args[0] 28951 if !(isARM64BFMask(sc, ac, 0)) { 28952 break 28953 } 28954 v.reset(OpARM64UBFIZ) 28955 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, 0)) 28956 v.AddArg(x) 28957 return true 28958 } 28959 // match: (SLLconst [sc] (MOVWUreg x)) 28960 // cond: isARM64BFMask(sc, 1<<32-1, 0) 28961 // result: (UBFIZ [arm64BFAuxInt(sc, 32)] x) 28962 for { 28963 sc := v.AuxInt 28964 v_0 := v.Args[0] 28965 if v_0.Op != OpARM64MOVWUreg { 28966 break 28967 } 28968 x := v_0.Args[0] 28969 if !(isARM64BFMask(sc, 1<<32-1, 0)) { 28970 break 28971 } 28972 v.reset(OpARM64UBFIZ) 28973 v.AuxInt = arm64BFAuxInt(sc, 32) 28974 v.AddArg(x) 28975 return true 28976 } 28977 // match: (SLLconst [sc] (MOVHUreg x)) 28978 // cond: isARM64BFMask(sc, 1<<16-1, 0) 28979 // result: (UBFIZ [arm64BFAuxInt(sc, 16)] x) 28980 for { 28981 sc := v.AuxInt 28982 v_0 := v.Args[0] 28983 if v_0.Op != OpARM64MOVHUreg { 28984 break 28985 } 28986 x := v_0.Args[0] 28987 if !(isARM64BFMask(sc, 1<<16-1, 0)) { 28988 break 28989 } 28990 v.reset(OpARM64UBFIZ) 28991 v.AuxInt = arm64BFAuxInt(sc, 16) 28992 v.AddArg(x) 28993 return true 28994 } 28995 // match: (SLLconst [sc] (MOVBUreg x)) 28996 // cond: isARM64BFMask(sc, 1<<8-1, 0) 28997 // result: (UBFIZ [arm64BFAuxInt(sc, 8)] x) 28998 for { 28999 sc := v.AuxInt 29000 v_0 := v.Args[0] 29001 if v_0.Op != OpARM64MOVBUreg { 29002 break 29003 } 29004 x := v_0.Args[0] 29005 if !(isARM64BFMask(sc, 1<<8-1, 0)) { 29006 break 29007 } 29008 v.reset(OpARM64UBFIZ) 29009 v.AuxInt = arm64BFAuxInt(sc, 8) 29010 v.AddArg(x) 29011 return true 29012 } 29013 // match: (SLLconst [sc] (UBFIZ [bfc] x)) 29014 // cond: sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64 29015 // result: (UBFIZ 
[arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))] x) 29016 for { 29017 sc := v.AuxInt 29018 v_0 := v.Args[0] 29019 if v_0.Op != OpARM64UBFIZ { 29020 break 29021 } 29022 bfc := v_0.AuxInt 29023 x := v_0.Args[0] 29024 if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) { 29025 break 29026 } 29027 v.reset(OpARM64UBFIZ) 29028 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)) 29029 v.AddArg(x) 29030 return true 29031 } 29032 return false 29033 } 29034 func rewriteValueARM64_OpARM64SRA_0(v *Value) bool { 29035 // match: (SRA x (MOVDconst [c])) 29036 // cond: 29037 // result: (SRAconst x [c&63]) 29038 for { 29039 _ = v.Args[1] 29040 x := v.Args[0] 29041 v_1 := v.Args[1] 29042 if v_1.Op != OpARM64MOVDconst { 29043 break 29044 } 29045 c := v_1.AuxInt 29046 v.reset(OpARM64SRAconst) 29047 v.AuxInt = c & 63 29048 v.AddArg(x) 29049 return true 29050 } 29051 return false 29052 } 29053 func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool { 29054 // match: (SRAconst [c] (MOVDconst [d])) 29055 // cond: 29056 // result: (MOVDconst [d>>uint64(c)]) 29057 for { 29058 c := v.AuxInt 29059 v_0 := v.Args[0] 29060 if v_0.Op != OpARM64MOVDconst { 29061 break 29062 } 29063 d := v_0.AuxInt 29064 v.reset(OpARM64MOVDconst) 29065 v.AuxInt = d >> uint64(c) 29066 return true 29067 } 29068 // match: (SRAconst [rc] (SLLconst [lc] x)) 29069 // cond: lc > rc 29070 // result: (SBFIZ [arm64BFAuxInt(lc-rc, 64-lc)] x) 29071 for { 29072 rc := v.AuxInt 29073 v_0 := v.Args[0] 29074 if v_0.Op != OpARM64SLLconst { 29075 break 29076 } 29077 lc := v_0.AuxInt 29078 x := v_0.Args[0] 29079 if !(lc > rc) { 29080 break 29081 } 29082 v.reset(OpARM64SBFIZ) 29083 v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc) 29084 v.AddArg(x) 29085 return true 29086 } 29087 // match: (SRAconst [rc] (SLLconst [lc] x)) 29088 // cond: lc <= rc 29089 // result: (SBFX [arm64BFAuxInt(rc-lc, 64-rc)] x) 29090 for { 29091 rc := v.AuxInt 29092 v_0 := v.Args[0] 29093 if v_0.Op != OpARM64SLLconst { 29094 break 29095 } 
29096 lc := v_0.AuxInt 29097 x := v_0.Args[0] 29098 if !(lc <= rc) { 29099 break 29100 } 29101 v.reset(OpARM64SBFX) 29102 v.AuxInt = arm64BFAuxInt(rc-lc, 64-rc) 29103 v.AddArg(x) 29104 return true 29105 } 29106 // match: (SRAconst [rc] (MOVWreg x)) 29107 // cond: rc < 32 29108 // result: (SBFX [arm64BFAuxInt(rc, 32-rc)] x) 29109 for { 29110 rc := v.AuxInt 29111 v_0 := v.Args[0] 29112 if v_0.Op != OpARM64MOVWreg { 29113 break 29114 } 29115 x := v_0.Args[0] 29116 if !(rc < 32) { 29117 break 29118 } 29119 v.reset(OpARM64SBFX) 29120 v.AuxInt = arm64BFAuxInt(rc, 32-rc) 29121 v.AddArg(x) 29122 return true 29123 } 29124 // match: (SRAconst [rc] (MOVHreg x)) 29125 // cond: rc < 16 29126 // result: (SBFX [arm64BFAuxInt(rc, 16-rc)] x) 29127 for { 29128 rc := v.AuxInt 29129 v_0 := v.Args[0] 29130 if v_0.Op != OpARM64MOVHreg { 29131 break 29132 } 29133 x := v_0.Args[0] 29134 if !(rc < 16) { 29135 break 29136 } 29137 v.reset(OpARM64SBFX) 29138 v.AuxInt = arm64BFAuxInt(rc, 16-rc) 29139 v.AddArg(x) 29140 return true 29141 } 29142 // match: (SRAconst [rc] (MOVBreg x)) 29143 // cond: rc < 8 29144 // result: (SBFX [arm64BFAuxInt(rc, 8-rc)] x) 29145 for { 29146 rc := v.AuxInt 29147 v_0 := v.Args[0] 29148 if v_0.Op != OpARM64MOVBreg { 29149 break 29150 } 29151 x := v_0.Args[0] 29152 if !(rc < 8) { 29153 break 29154 } 29155 v.reset(OpARM64SBFX) 29156 v.AuxInt = arm64BFAuxInt(rc, 8-rc) 29157 v.AddArg(x) 29158 return true 29159 } 29160 // match: (SRAconst [sc] (SBFIZ [bfc] x)) 29161 // cond: sc < getARM64BFlsb(bfc) 29162 // result: (SBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x) 29163 for { 29164 sc := v.AuxInt 29165 v_0 := v.Args[0] 29166 if v_0.Op != OpARM64SBFIZ { 29167 break 29168 } 29169 bfc := v_0.AuxInt 29170 x := v_0.Args[0] 29171 if !(sc < getARM64BFlsb(bfc)) { 29172 break 29173 } 29174 v.reset(OpARM64SBFIZ) 29175 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc)) 29176 v.AddArg(x) 29177 return true 29178 } 29179 // match: (SRAconst [sc] 
(SBFIZ [bfc] x)) 29180 // cond: sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc) 29181 // result: (SBFX [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x) 29182 for { 29183 sc := v.AuxInt 29184 v_0 := v.Args[0] 29185 if v_0.Op != OpARM64SBFIZ { 29186 break 29187 } 29188 bfc := v_0.AuxInt 29189 x := v_0.Args[0] 29190 if !(sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) { 29191 break 29192 } 29193 v.reset(OpARM64SBFX) 29194 v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc) 29195 v.AddArg(x) 29196 return true 29197 } 29198 return false 29199 } 29200 func rewriteValueARM64_OpARM64SRL_0(v *Value) bool { 29201 // match: (SRL x (MOVDconst [c])) 29202 // cond: 29203 // result: (SRLconst x [c&63]) 29204 for { 29205 _ = v.Args[1] 29206 x := v.Args[0] 29207 v_1 := v.Args[1] 29208 if v_1.Op != OpARM64MOVDconst { 29209 break 29210 } 29211 c := v_1.AuxInt 29212 v.reset(OpARM64SRLconst) 29213 v.AuxInt = c & 63 29214 v.AddArg(x) 29215 return true 29216 } 29217 return false 29218 } 29219 func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool { 29220 // match: (SRLconst [c] (MOVDconst [d])) 29221 // cond: 29222 // result: (MOVDconst [int64(uint64(d)>>uint64(c))]) 29223 for { 29224 c := v.AuxInt 29225 v_0 := v.Args[0] 29226 if v_0.Op != OpARM64MOVDconst { 29227 break 29228 } 29229 d := v_0.AuxInt 29230 v.reset(OpARM64MOVDconst) 29231 v.AuxInt = int64(uint64(d) >> uint64(c)) 29232 return true 29233 } 29234 // match: (SRLconst [c] (SLLconst [c] x)) 29235 // cond: 0 < c && c < 64 29236 // result: (ANDconst [1<<uint(64-c)-1] x) 29237 for { 29238 c := v.AuxInt 29239 v_0 := v.Args[0] 29240 if v_0.Op != OpARM64SLLconst { 29241 break 29242 } 29243 if v_0.AuxInt != c { 29244 break 29245 } 29246 x := v_0.Args[0] 29247 if !(0 < c && c < 64) { 29248 break 29249 } 29250 v.reset(OpARM64ANDconst) 29251 v.AuxInt = 1<<uint(64-c) - 1 29252 v.AddArg(x) 29253 return true 29254 
} 29255 // match: (SRLconst [rc] (SLLconst [lc] x)) 29256 // cond: lc > rc 29257 // result: (UBFIZ [arm64BFAuxInt(lc-rc, 64-lc)] x) 29258 for { 29259 rc := v.AuxInt 29260 v_0 := v.Args[0] 29261 if v_0.Op != OpARM64SLLconst { 29262 break 29263 } 29264 lc := v_0.AuxInt 29265 x := v_0.Args[0] 29266 if !(lc > rc) { 29267 break 29268 } 29269 v.reset(OpARM64UBFIZ) 29270 v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc) 29271 v.AddArg(x) 29272 return true 29273 } 29274 // match: (SRLconst [sc] (ANDconst [ac] x)) 29275 // cond: isARM64BFMask(sc, ac, sc) 29276 // result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(ac, sc))] x) 29277 for { 29278 sc := v.AuxInt 29279 v_0 := v.Args[0] 29280 if v_0.Op != OpARM64ANDconst { 29281 break 29282 } 29283 ac := v_0.AuxInt 29284 x := v_0.Args[0] 29285 if !(isARM64BFMask(sc, ac, sc)) { 29286 break 29287 } 29288 v.reset(OpARM64UBFX) 29289 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, sc)) 29290 v.AddArg(x) 29291 return true 29292 } 29293 // match: (SRLconst [sc] (MOVWUreg x)) 29294 // cond: isARM64BFMask(sc, 1<<32-1, sc) 29295 // result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x) 29296 for { 29297 sc := v.AuxInt 29298 v_0 := v.Args[0] 29299 if v_0.Op != OpARM64MOVWUreg { 29300 break 29301 } 29302 x := v_0.Args[0] 29303 if !(isARM64BFMask(sc, 1<<32-1, sc)) { 29304 break 29305 } 29306 v.reset(OpARM64UBFX) 29307 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc)) 29308 v.AddArg(x) 29309 return true 29310 } 29311 // match: (SRLconst [sc] (MOVHUreg x)) 29312 // cond: isARM64BFMask(sc, 1<<16-1, sc) 29313 // result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x) 29314 for { 29315 sc := v.AuxInt 29316 v_0 := v.Args[0] 29317 if v_0.Op != OpARM64MOVHUreg { 29318 break 29319 } 29320 x := v_0.Args[0] 29321 if !(isARM64BFMask(sc, 1<<16-1, sc)) { 29322 break 29323 } 29324 v.reset(OpARM64UBFX) 29325 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc)) 29326 v.AddArg(x) 29327 return true 29328 } 29329 // match: (SRLconst [sc] (MOVBUreg x)) 
29330 // cond: isARM64BFMask(sc, 1<<8-1, sc) 29331 // result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x) 29332 for { 29333 sc := v.AuxInt 29334 v_0 := v.Args[0] 29335 if v_0.Op != OpARM64MOVBUreg { 29336 break 29337 } 29338 x := v_0.Args[0] 29339 if !(isARM64BFMask(sc, 1<<8-1, sc)) { 29340 break 29341 } 29342 v.reset(OpARM64UBFX) 29343 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc)) 29344 v.AddArg(x) 29345 return true 29346 } 29347 // match: (SRLconst [rc] (SLLconst [lc] x)) 29348 // cond: lc < rc 29349 // result: (UBFX [arm64BFAuxInt(rc-lc, 64-rc)] x) 29350 for { 29351 rc := v.AuxInt 29352 v_0 := v.Args[0] 29353 if v_0.Op != OpARM64SLLconst { 29354 break 29355 } 29356 lc := v_0.AuxInt 29357 x := v_0.Args[0] 29358 if !(lc < rc) { 29359 break 29360 } 29361 v.reset(OpARM64UBFX) 29362 v.AuxInt = arm64BFAuxInt(rc-lc, 64-rc) 29363 v.AddArg(x) 29364 return true 29365 } 29366 // match: (SRLconst [sc] (UBFX [bfc] x)) 29367 // cond: sc < getARM64BFwidth(bfc) 29368 // result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x) 29369 for { 29370 sc := v.AuxInt 29371 v_0 := v.Args[0] 29372 if v_0.Op != OpARM64UBFX { 29373 break 29374 } 29375 bfc := v_0.AuxInt 29376 x := v_0.Args[0] 29377 if !(sc < getARM64BFwidth(bfc)) { 29378 break 29379 } 29380 v.reset(OpARM64UBFX) 29381 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc) 29382 v.AddArg(x) 29383 return true 29384 } 29385 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 29386 // cond: sc == getARM64BFlsb(bfc) 29387 // result: (ANDconst [1<<uint(getARM64BFwidth(bfc))-1] x) 29388 for { 29389 sc := v.AuxInt 29390 v_0 := v.Args[0] 29391 if v_0.Op != OpARM64UBFIZ { 29392 break 29393 } 29394 bfc := v_0.AuxInt 29395 x := v_0.Args[0] 29396 if !(sc == getARM64BFlsb(bfc)) { 29397 break 29398 } 29399 v.reset(OpARM64ANDconst) 29400 v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1 29401 v.AddArg(x) 29402 return true 29403 } 29404 return false 29405 } 29406 func 
rewriteValueARM64_OpARM64SRLconst_10(v *Value) bool { 29407 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 29408 // cond: sc < getARM64BFlsb(bfc) 29409 // result: (UBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x) 29410 for { 29411 sc := v.AuxInt 29412 v_0 := v.Args[0] 29413 if v_0.Op != OpARM64UBFIZ { 29414 break 29415 } 29416 bfc := v_0.AuxInt 29417 x := v_0.Args[0] 29418 if !(sc < getARM64BFlsb(bfc)) { 29419 break 29420 } 29421 v.reset(OpARM64UBFIZ) 29422 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc)) 29423 v.AddArg(x) 29424 return true 29425 } 29426 // match: (SRLconst [sc] (UBFIZ [bfc] x)) 29427 // cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc) 29428 // result: (UBFX [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x) 29429 for { 29430 sc := v.AuxInt 29431 v_0 := v.Args[0] 29432 if v_0.Op != OpARM64UBFIZ { 29433 break 29434 } 29435 bfc := v_0.AuxInt 29436 x := v_0.Args[0] 29437 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) { 29438 break 29439 } 29440 v.reset(OpARM64UBFX) 29441 v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc) 29442 v.AddArg(x) 29443 return true 29444 } 29445 return false 29446 } 29447 func rewriteValueARM64_OpARM64STP_0(v *Value) bool { 29448 b := v.Block 29449 _ = b 29450 config := b.Func.Config 29451 _ = config 29452 // match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem) 29453 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 29454 // result: (STP [off1+off2] {sym} ptr val1 val2 mem) 29455 for { 29456 off1 := v.AuxInt 29457 sym := v.Aux 29458 _ = v.Args[3] 29459 v_0 := v.Args[0] 29460 if v_0.Op != OpARM64ADDconst { 29461 break 29462 } 29463 off2 := v_0.AuxInt 29464 ptr := v_0.Args[0] 29465 val1 := v.Args[1] 29466 val2 := v.Args[2] 29467 mem := v.Args[3] 29468 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 
29469 break 29470 } 29471 v.reset(OpARM64STP) 29472 v.AuxInt = off1 + off2 29473 v.Aux = sym 29474 v.AddArg(ptr) 29475 v.AddArg(val1) 29476 v.AddArg(val2) 29477 v.AddArg(mem) 29478 return true 29479 } 29480 // match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem) 29481 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 29482 // result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem) 29483 for { 29484 off1 := v.AuxInt 29485 sym1 := v.Aux 29486 _ = v.Args[3] 29487 v_0 := v.Args[0] 29488 if v_0.Op != OpARM64MOVDaddr { 29489 break 29490 } 29491 off2 := v_0.AuxInt 29492 sym2 := v_0.Aux 29493 ptr := v_0.Args[0] 29494 val1 := v.Args[1] 29495 val2 := v.Args[2] 29496 mem := v.Args[3] 29497 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 29498 break 29499 } 29500 v.reset(OpARM64STP) 29501 v.AuxInt = off1 + off2 29502 v.Aux = mergeSym(sym1, sym2) 29503 v.AddArg(ptr) 29504 v.AddArg(val1) 29505 v.AddArg(val2) 29506 v.AddArg(mem) 29507 return true 29508 } 29509 // match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem) 29510 // cond: 29511 // result: (MOVQstorezero [off] {sym} ptr mem) 29512 for { 29513 off := v.AuxInt 29514 sym := v.Aux 29515 _ = v.Args[3] 29516 ptr := v.Args[0] 29517 v_1 := v.Args[1] 29518 if v_1.Op != OpARM64MOVDconst { 29519 break 29520 } 29521 if v_1.AuxInt != 0 { 29522 break 29523 } 29524 v_2 := v.Args[2] 29525 if v_2.Op != OpARM64MOVDconst { 29526 break 29527 } 29528 if v_2.AuxInt != 0 { 29529 break 29530 } 29531 mem := v.Args[3] 29532 v.reset(OpARM64MOVQstorezero) 29533 v.AuxInt = off 29534 v.Aux = sym 29535 v.AddArg(ptr) 29536 v.AddArg(mem) 29537 return true 29538 } 29539 return false 29540 } 29541 func rewriteValueARM64_OpARM64SUB_0(v *Value) bool { 29542 b := v.Block 29543 _ = b 29544 // match: (SUB x (MOVDconst [c])) 29545 // cond: 29546 // result: (SUBconst [c] x) 29547 for { 29548 _ = v.Args[1] 29549 x := 
v.Args[0] 29550 v_1 := v.Args[1] 29551 if v_1.Op != OpARM64MOVDconst { 29552 break 29553 } 29554 c := v_1.AuxInt 29555 v.reset(OpARM64SUBconst) 29556 v.AuxInt = c 29557 v.AddArg(x) 29558 return true 29559 } 29560 // match: (SUB a l:(MUL x y)) 29561 // cond: l.Uses==1 && clobber(l) 29562 // result: (MSUB a x y) 29563 for { 29564 _ = v.Args[1] 29565 a := v.Args[0] 29566 l := v.Args[1] 29567 if l.Op != OpARM64MUL { 29568 break 29569 } 29570 _ = l.Args[1] 29571 x := l.Args[0] 29572 y := l.Args[1] 29573 if !(l.Uses == 1 && clobber(l)) { 29574 break 29575 } 29576 v.reset(OpARM64MSUB) 29577 v.AddArg(a) 29578 v.AddArg(x) 29579 v.AddArg(y) 29580 return true 29581 } 29582 // match: (SUB a l:(MNEG x y)) 29583 // cond: l.Uses==1 && clobber(l) 29584 // result: (MADD a x y) 29585 for { 29586 _ = v.Args[1] 29587 a := v.Args[0] 29588 l := v.Args[1] 29589 if l.Op != OpARM64MNEG { 29590 break 29591 } 29592 _ = l.Args[1] 29593 x := l.Args[0] 29594 y := l.Args[1] 29595 if !(l.Uses == 1 && clobber(l)) { 29596 break 29597 } 29598 v.reset(OpARM64MADD) 29599 v.AddArg(a) 29600 v.AddArg(x) 29601 v.AddArg(y) 29602 return true 29603 } 29604 // match: (SUB a l:(MULW x y)) 29605 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 29606 // result: (MSUBW a x y) 29607 for { 29608 _ = v.Args[1] 29609 a := v.Args[0] 29610 l := v.Args[1] 29611 if l.Op != OpARM64MULW { 29612 break 29613 } 29614 _ = l.Args[1] 29615 x := l.Args[0] 29616 y := l.Args[1] 29617 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 29618 break 29619 } 29620 v.reset(OpARM64MSUBW) 29621 v.AddArg(a) 29622 v.AddArg(x) 29623 v.AddArg(y) 29624 return true 29625 } 29626 // match: (SUB a l:(MNEGW x y)) 29627 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 29628 // result: (MADDW a x y) 29629 for { 29630 _ = v.Args[1] 29631 a := v.Args[0] 29632 l := v.Args[1] 29633 if l.Op != OpARM64MNEGW { 29634 break 29635 } 29636 _ = l.Args[1] 29637 x := l.Args[0] 29638 y := l.Args[1] 29639 if !(a.Type.Size() != 8 && l.Uses == 1 && 
clobber(l)) { 29640 break 29641 } 29642 v.reset(OpARM64MADDW) 29643 v.AddArg(a) 29644 v.AddArg(x) 29645 v.AddArg(y) 29646 return true 29647 } 29648 // match: (SUB x x) 29649 // cond: 29650 // result: (MOVDconst [0]) 29651 for { 29652 _ = v.Args[1] 29653 x := v.Args[0] 29654 if x != v.Args[1] { 29655 break 29656 } 29657 v.reset(OpARM64MOVDconst) 29658 v.AuxInt = 0 29659 return true 29660 } 29661 // match: (SUB x (SUB y z)) 29662 // cond: 29663 // result: (SUB (ADD <v.Type> x z) y) 29664 for { 29665 _ = v.Args[1] 29666 x := v.Args[0] 29667 v_1 := v.Args[1] 29668 if v_1.Op != OpARM64SUB { 29669 break 29670 } 29671 _ = v_1.Args[1] 29672 y := v_1.Args[0] 29673 z := v_1.Args[1] 29674 v.reset(OpARM64SUB) 29675 v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type) 29676 v0.AddArg(x) 29677 v0.AddArg(z) 29678 v.AddArg(v0) 29679 v.AddArg(y) 29680 return true 29681 } 29682 // match: (SUB (SUB x y) z) 29683 // cond: 29684 // result: (SUB x (ADD <y.Type> y z)) 29685 for { 29686 _ = v.Args[1] 29687 v_0 := v.Args[0] 29688 if v_0.Op != OpARM64SUB { 29689 break 29690 } 29691 _ = v_0.Args[1] 29692 x := v_0.Args[0] 29693 y := v_0.Args[1] 29694 z := v.Args[1] 29695 v.reset(OpARM64SUB) 29696 v.AddArg(x) 29697 v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type) 29698 v0.AddArg(y) 29699 v0.AddArg(z) 29700 v.AddArg(v0) 29701 return true 29702 } 29703 // match: (SUB x0 x1:(SLLconst [c] y)) 29704 // cond: clobberIfDead(x1) 29705 // result: (SUBshiftLL x0 y [c]) 29706 for { 29707 _ = v.Args[1] 29708 x0 := v.Args[0] 29709 x1 := v.Args[1] 29710 if x1.Op != OpARM64SLLconst { 29711 break 29712 } 29713 c := x1.AuxInt 29714 y := x1.Args[0] 29715 if !(clobberIfDead(x1)) { 29716 break 29717 } 29718 v.reset(OpARM64SUBshiftLL) 29719 v.AuxInt = c 29720 v.AddArg(x0) 29721 v.AddArg(y) 29722 return true 29723 } 29724 // match: (SUB x0 x1:(SRLconst [c] y)) 29725 // cond: clobberIfDead(x1) 29726 // result: (SUBshiftRL x0 y [c]) 29727 for { 29728 _ = v.Args[1] 29729 x0 := v.Args[0] 29730 x1 := v.Args[1] 29731 if x1.Op != 
OpARM64SRLconst { 29732 break 29733 } 29734 c := x1.AuxInt 29735 y := x1.Args[0] 29736 if !(clobberIfDead(x1)) { 29737 break 29738 } 29739 v.reset(OpARM64SUBshiftRL) 29740 v.AuxInt = c 29741 v.AddArg(x0) 29742 v.AddArg(y) 29743 return true 29744 } 29745 return false 29746 } 29747 func rewriteValueARM64_OpARM64SUB_10(v *Value) bool { 29748 // match: (SUB x0 x1:(SRAconst [c] y)) 29749 // cond: clobberIfDead(x1) 29750 // result: (SUBshiftRA x0 y [c]) 29751 for { 29752 _ = v.Args[1] 29753 x0 := v.Args[0] 29754 x1 := v.Args[1] 29755 if x1.Op != OpARM64SRAconst { 29756 break 29757 } 29758 c := x1.AuxInt 29759 y := x1.Args[0] 29760 if !(clobberIfDead(x1)) { 29761 break 29762 } 29763 v.reset(OpARM64SUBshiftRA) 29764 v.AuxInt = c 29765 v.AddArg(x0) 29766 v.AddArg(y) 29767 return true 29768 } 29769 return false 29770 } 29771 func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool { 29772 // match: (SUBconst [0] x) 29773 // cond: 29774 // result: x 29775 for { 29776 if v.AuxInt != 0 { 29777 break 29778 } 29779 x := v.Args[0] 29780 v.reset(OpCopy) 29781 v.Type = x.Type 29782 v.AddArg(x) 29783 return true 29784 } 29785 // match: (SUBconst [c] (MOVDconst [d])) 29786 // cond: 29787 // result: (MOVDconst [d-c]) 29788 for { 29789 c := v.AuxInt 29790 v_0 := v.Args[0] 29791 if v_0.Op != OpARM64MOVDconst { 29792 break 29793 } 29794 d := v_0.AuxInt 29795 v.reset(OpARM64MOVDconst) 29796 v.AuxInt = d - c 29797 return true 29798 } 29799 // match: (SUBconst [c] (SUBconst [d] x)) 29800 // cond: 29801 // result: (ADDconst [-c-d] x) 29802 for { 29803 c := v.AuxInt 29804 v_0 := v.Args[0] 29805 if v_0.Op != OpARM64SUBconst { 29806 break 29807 } 29808 d := v_0.AuxInt 29809 x := v_0.Args[0] 29810 v.reset(OpARM64ADDconst) 29811 v.AuxInt = -c - d 29812 v.AddArg(x) 29813 return true 29814 } 29815 // match: (SUBconst [c] (ADDconst [d] x)) 29816 // cond: 29817 // result: (ADDconst [-c+d] x) 29818 for { 29819 c := v.AuxInt 29820 v_0 := v.Args[0] 29821 if v_0.Op != OpARM64ADDconst { 29822 break 29823 } 
29824 d := v_0.AuxInt 29825 x := v_0.Args[0] 29826 v.reset(OpARM64ADDconst) 29827 v.AuxInt = -c + d 29828 v.AddArg(x) 29829 return true 29830 } 29831 return false 29832 } 29833 func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool { 29834 // match: (SUBshiftLL x (MOVDconst [c]) [d]) 29835 // cond: 29836 // result: (SUBconst x [int64(uint64(c)<<uint64(d))]) 29837 for { 29838 d := v.AuxInt 29839 _ = v.Args[1] 29840 x := v.Args[0] 29841 v_1 := v.Args[1] 29842 if v_1.Op != OpARM64MOVDconst { 29843 break 29844 } 29845 c := v_1.AuxInt 29846 v.reset(OpARM64SUBconst) 29847 v.AuxInt = int64(uint64(c) << uint64(d)) 29848 v.AddArg(x) 29849 return true 29850 } 29851 // match: (SUBshiftLL x (SLLconst x [c]) [d]) 29852 // cond: c==d 29853 // result: (MOVDconst [0]) 29854 for { 29855 d := v.AuxInt 29856 _ = v.Args[1] 29857 x := v.Args[0] 29858 v_1 := v.Args[1] 29859 if v_1.Op != OpARM64SLLconst { 29860 break 29861 } 29862 c := v_1.AuxInt 29863 if x != v_1.Args[0] { 29864 break 29865 } 29866 if !(c == d) { 29867 break 29868 } 29869 v.reset(OpARM64MOVDconst) 29870 v.AuxInt = 0 29871 return true 29872 } 29873 return false 29874 } 29875 func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool { 29876 // match: (SUBshiftRA x (MOVDconst [c]) [d]) 29877 // cond: 29878 // result: (SUBconst x [c>>uint64(d)]) 29879 for { 29880 d := v.AuxInt 29881 _ = v.Args[1] 29882 x := v.Args[0] 29883 v_1 := v.Args[1] 29884 if v_1.Op != OpARM64MOVDconst { 29885 break 29886 } 29887 c := v_1.AuxInt 29888 v.reset(OpARM64SUBconst) 29889 v.AuxInt = c >> uint64(d) 29890 v.AddArg(x) 29891 return true 29892 } 29893 // match: (SUBshiftRA x (SRAconst x [c]) [d]) 29894 // cond: c==d 29895 // result: (MOVDconst [0]) 29896 for { 29897 d := v.AuxInt 29898 _ = v.Args[1] 29899 x := v.Args[0] 29900 v_1 := v.Args[1] 29901 if v_1.Op != OpARM64SRAconst { 29902 break 29903 } 29904 c := v_1.AuxInt 29905 if x != v_1.Args[0] { 29906 break 29907 } 29908 if !(c == d) { 29909 break 29910 } 29911 v.reset(OpARM64MOVDconst) 
29912 v.AuxInt = 0 29913 return true 29914 } 29915 return false 29916 } 29917 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool { 29918 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 29919 // cond: 29920 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 29921 for { 29922 d := v.AuxInt 29923 _ = v.Args[1] 29924 x := v.Args[0] 29925 v_1 := v.Args[1] 29926 if v_1.Op != OpARM64MOVDconst { 29927 break 29928 } 29929 c := v_1.AuxInt 29930 v.reset(OpARM64SUBconst) 29931 v.AuxInt = int64(uint64(c) >> uint64(d)) 29932 v.AddArg(x) 29933 return true 29934 } 29935 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 29936 // cond: c==d 29937 // result: (MOVDconst [0]) 29938 for { 29939 d := v.AuxInt 29940 _ = v.Args[1] 29941 x := v.Args[0] 29942 v_1 := v.Args[1] 29943 if v_1.Op != OpARM64SRLconst { 29944 break 29945 } 29946 c := v_1.AuxInt 29947 if x != v_1.Args[0] { 29948 break 29949 } 29950 if !(c == d) { 29951 break 29952 } 29953 v.reset(OpARM64MOVDconst) 29954 v.AuxInt = 0 29955 return true 29956 } 29957 return false 29958 } 29959 func rewriteValueARM64_OpARM64TST_0(v *Value) bool { 29960 // match: (TST x (MOVDconst [c])) 29961 // cond: 29962 // result: (TSTconst [c] x) 29963 for { 29964 _ = v.Args[1] 29965 x := v.Args[0] 29966 v_1 := v.Args[1] 29967 if v_1.Op != OpARM64MOVDconst { 29968 break 29969 } 29970 c := v_1.AuxInt 29971 v.reset(OpARM64TSTconst) 29972 v.AuxInt = c 29973 v.AddArg(x) 29974 return true 29975 } 29976 // match: (TST (MOVDconst [c]) x) 29977 // cond: 29978 // result: (TSTconst [c] x) 29979 for { 29980 _ = v.Args[1] 29981 v_0 := v.Args[0] 29982 if v_0.Op != OpARM64MOVDconst { 29983 break 29984 } 29985 c := v_0.AuxInt 29986 x := v.Args[1] 29987 v.reset(OpARM64TSTconst) 29988 v.AuxInt = c 29989 v.AddArg(x) 29990 return true 29991 } 29992 // match: (TST x0 x1:(SLLconst [c] y)) 29993 // cond: clobberIfDead(x1) 29994 // result: (TSTshiftLL x0 y [c]) 29995 for { 29996 _ = v.Args[1] 29997 x0 := v.Args[0] 29998 x1 := v.Args[1] 29999 if x1.Op != OpARM64SLLconst 
{ 30000 break 30001 } 30002 c := x1.AuxInt 30003 y := x1.Args[0] 30004 if !(clobberIfDead(x1)) { 30005 break 30006 } 30007 v.reset(OpARM64TSTshiftLL) 30008 v.AuxInt = c 30009 v.AddArg(x0) 30010 v.AddArg(y) 30011 return true 30012 } 30013 // match: (TST x1:(SLLconst [c] y) x0) 30014 // cond: clobberIfDead(x1) 30015 // result: (TSTshiftLL x0 y [c]) 30016 for { 30017 _ = v.Args[1] 30018 x1 := v.Args[0] 30019 if x1.Op != OpARM64SLLconst { 30020 break 30021 } 30022 c := x1.AuxInt 30023 y := x1.Args[0] 30024 x0 := v.Args[1] 30025 if !(clobberIfDead(x1)) { 30026 break 30027 } 30028 v.reset(OpARM64TSTshiftLL) 30029 v.AuxInt = c 30030 v.AddArg(x0) 30031 v.AddArg(y) 30032 return true 30033 } 30034 // match: (TST x0 x1:(SRLconst [c] y)) 30035 // cond: clobberIfDead(x1) 30036 // result: (TSTshiftRL x0 y [c]) 30037 for { 30038 _ = v.Args[1] 30039 x0 := v.Args[0] 30040 x1 := v.Args[1] 30041 if x1.Op != OpARM64SRLconst { 30042 break 30043 } 30044 c := x1.AuxInt 30045 y := x1.Args[0] 30046 if !(clobberIfDead(x1)) { 30047 break 30048 } 30049 v.reset(OpARM64TSTshiftRL) 30050 v.AuxInt = c 30051 v.AddArg(x0) 30052 v.AddArg(y) 30053 return true 30054 } 30055 // match: (TST x1:(SRLconst [c] y) x0) 30056 // cond: clobberIfDead(x1) 30057 // result: (TSTshiftRL x0 y [c]) 30058 for { 30059 _ = v.Args[1] 30060 x1 := v.Args[0] 30061 if x1.Op != OpARM64SRLconst { 30062 break 30063 } 30064 c := x1.AuxInt 30065 y := x1.Args[0] 30066 x0 := v.Args[1] 30067 if !(clobberIfDead(x1)) { 30068 break 30069 } 30070 v.reset(OpARM64TSTshiftRL) 30071 v.AuxInt = c 30072 v.AddArg(x0) 30073 v.AddArg(y) 30074 return true 30075 } 30076 // match: (TST x0 x1:(SRAconst [c] y)) 30077 // cond: clobberIfDead(x1) 30078 // result: (TSTshiftRA x0 y [c]) 30079 for { 30080 _ = v.Args[1] 30081 x0 := v.Args[0] 30082 x1 := v.Args[1] 30083 if x1.Op != OpARM64SRAconst { 30084 break 30085 } 30086 c := x1.AuxInt 30087 y := x1.Args[0] 30088 if !(clobberIfDead(x1)) { 30089 break 30090 } 30091 v.reset(OpARM64TSTshiftRA) 30092 
v.AuxInt = c 30093 v.AddArg(x0) 30094 v.AddArg(y) 30095 return true 30096 } 30097 // match: (TST x1:(SRAconst [c] y) x0) 30098 // cond: clobberIfDead(x1) 30099 // result: (TSTshiftRA x0 y [c]) 30100 for { 30101 _ = v.Args[1] 30102 x1 := v.Args[0] 30103 if x1.Op != OpARM64SRAconst { 30104 break 30105 } 30106 c := x1.AuxInt 30107 y := x1.Args[0] 30108 x0 := v.Args[1] 30109 if !(clobberIfDead(x1)) { 30110 break 30111 } 30112 v.reset(OpARM64TSTshiftRA) 30113 v.AuxInt = c 30114 v.AddArg(x0) 30115 v.AddArg(y) 30116 return true 30117 } 30118 return false 30119 } 30120 func rewriteValueARM64_OpARM64TSTW_0(v *Value) bool { 30121 // match: (TSTW x (MOVDconst [c])) 30122 // cond: 30123 // result: (TSTWconst [c] x) 30124 for { 30125 _ = v.Args[1] 30126 x := v.Args[0] 30127 v_1 := v.Args[1] 30128 if v_1.Op != OpARM64MOVDconst { 30129 break 30130 } 30131 c := v_1.AuxInt 30132 v.reset(OpARM64TSTWconst) 30133 v.AuxInt = c 30134 v.AddArg(x) 30135 return true 30136 } 30137 // match: (TSTW (MOVDconst [c]) x) 30138 // cond: 30139 // result: (TSTWconst [c] x) 30140 for { 30141 _ = v.Args[1] 30142 v_0 := v.Args[0] 30143 if v_0.Op != OpARM64MOVDconst { 30144 break 30145 } 30146 c := v_0.AuxInt 30147 x := v.Args[1] 30148 v.reset(OpARM64TSTWconst) 30149 v.AuxInt = c 30150 v.AddArg(x) 30151 return true 30152 } 30153 return false 30154 } 30155 func rewriteValueARM64_OpARM64TSTWconst_0(v *Value) bool { 30156 // match: (TSTWconst (MOVDconst [x]) [y]) 30157 // cond: int32(x&y)==0 30158 // result: (FlagEQ) 30159 for { 30160 y := v.AuxInt 30161 v_0 := v.Args[0] 30162 if v_0.Op != OpARM64MOVDconst { 30163 break 30164 } 30165 x := v_0.AuxInt 30166 if !(int32(x&y) == 0) { 30167 break 30168 } 30169 v.reset(OpARM64FlagEQ) 30170 return true 30171 } 30172 // match: (TSTWconst (MOVDconst [x]) [y]) 30173 // cond: int32(x&y)<0 30174 // result: (FlagLT_UGT) 30175 for { 30176 y := v.AuxInt 30177 v_0 := v.Args[0] 30178 if v_0.Op != OpARM64MOVDconst { 30179 break 30180 } 30181 x := v_0.AuxInt 30182 if 
!(int32(x&y) < 0) { 30183 break 30184 } 30185 v.reset(OpARM64FlagLT_UGT) 30186 return true 30187 } 30188 // match: (TSTWconst (MOVDconst [x]) [y]) 30189 // cond: int32(x&y)>0 30190 // result: (FlagGT_UGT) 30191 for { 30192 y := v.AuxInt 30193 v_0 := v.Args[0] 30194 if v_0.Op != OpARM64MOVDconst { 30195 break 30196 } 30197 x := v_0.AuxInt 30198 if !(int32(x&y) > 0) { 30199 break 30200 } 30201 v.reset(OpARM64FlagGT_UGT) 30202 return true 30203 } 30204 return false 30205 } 30206 func rewriteValueARM64_OpARM64TSTconst_0(v *Value) bool { 30207 // match: (TSTconst (MOVDconst [x]) [y]) 30208 // cond: int64(x&y)==0 30209 // result: (FlagEQ) 30210 for { 30211 y := v.AuxInt 30212 v_0 := v.Args[0] 30213 if v_0.Op != OpARM64MOVDconst { 30214 break 30215 } 30216 x := v_0.AuxInt 30217 if !(int64(x&y) == 0) { 30218 break 30219 } 30220 v.reset(OpARM64FlagEQ) 30221 return true 30222 } 30223 // match: (TSTconst (MOVDconst [x]) [y]) 30224 // cond: int64(x&y)<0 30225 // result: (FlagLT_UGT) 30226 for { 30227 y := v.AuxInt 30228 v_0 := v.Args[0] 30229 if v_0.Op != OpARM64MOVDconst { 30230 break 30231 } 30232 x := v_0.AuxInt 30233 if !(int64(x&y) < 0) { 30234 break 30235 } 30236 v.reset(OpARM64FlagLT_UGT) 30237 return true 30238 } 30239 // match: (TSTconst (MOVDconst [x]) [y]) 30240 // cond: int64(x&y)>0 30241 // result: (FlagGT_UGT) 30242 for { 30243 y := v.AuxInt 30244 v_0 := v.Args[0] 30245 if v_0.Op != OpARM64MOVDconst { 30246 break 30247 } 30248 x := v_0.AuxInt 30249 if !(int64(x&y) > 0) { 30250 break 30251 } 30252 v.reset(OpARM64FlagGT_UGT) 30253 return true 30254 } 30255 return false 30256 } 30257 func rewriteValueARM64_OpARM64TSTshiftLL_0(v *Value) bool { 30258 b := v.Block 30259 _ = b 30260 // match: (TSTshiftLL (MOVDconst [c]) x [d]) 30261 // cond: 30262 // result: (TSTconst [c] (SLLconst <x.Type> x [d])) 30263 for { 30264 d := v.AuxInt 30265 _ = v.Args[1] 30266 v_0 := v.Args[0] 30267 if v_0.Op != OpARM64MOVDconst { 30268 break 30269 } 30270 c := v_0.AuxInt 30271 x := 
v.Args[1] 30272 v.reset(OpARM64TSTconst) 30273 v.AuxInt = c 30274 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 30275 v0.AuxInt = d 30276 v0.AddArg(x) 30277 v.AddArg(v0) 30278 return true 30279 } 30280 // match: (TSTshiftLL x (MOVDconst [c]) [d]) 30281 // cond: 30282 // result: (TSTconst x [int64(uint64(c)<<uint64(d))]) 30283 for { 30284 d := v.AuxInt 30285 _ = v.Args[1] 30286 x := v.Args[0] 30287 v_1 := v.Args[1] 30288 if v_1.Op != OpARM64MOVDconst { 30289 break 30290 } 30291 c := v_1.AuxInt 30292 v.reset(OpARM64TSTconst) 30293 v.AuxInt = int64(uint64(c) << uint64(d)) 30294 v.AddArg(x) 30295 return true 30296 } 30297 return false 30298 } 30299 func rewriteValueARM64_OpARM64TSTshiftRA_0(v *Value) bool { 30300 b := v.Block 30301 _ = b 30302 // match: (TSTshiftRA (MOVDconst [c]) x [d]) 30303 // cond: 30304 // result: (TSTconst [c] (SRAconst <x.Type> x [d])) 30305 for { 30306 d := v.AuxInt 30307 _ = v.Args[1] 30308 v_0 := v.Args[0] 30309 if v_0.Op != OpARM64MOVDconst { 30310 break 30311 } 30312 c := v_0.AuxInt 30313 x := v.Args[1] 30314 v.reset(OpARM64TSTconst) 30315 v.AuxInt = c 30316 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 30317 v0.AuxInt = d 30318 v0.AddArg(x) 30319 v.AddArg(v0) 30320 return true 30321 } 30322 // match: (TSTshiftRA x (MOVDconst [c]) [d]) 30323 // cond: 30324 // result: (TSTconst x [c>>uint64(d)]) 30325 for { 30326 d := v.AuxInt 30327 _ = v.Args[1] 30328 x := v.Args[0] 30329 v_1 := v.Args[1] 30330 if v_1.Op != OpARM64MOVDconst { 30331 break 30332 } 30333 c := v_1.AuxInt 30334 v.reset(OpARM64TSTconst) 30335 v.AuxInt = c >> uint64(d) 30336 v.AddArg(x) 30337 return true 30338 } 30339 return false 30340 } 30341 func rewriteValueARM64_OpARM64TSTshiftRL_0(v *Value) bool { 30342 b := v.Block 30343 _ = b 30344 // match: (TSTshiftRL (MOVDconst [c]) x [d]) 30345 // cond: 30346 // result: (TSTconst [c] (SRLconst <x.Type> x [d])) 30347 for { 30348 d := v.AuxInt 30349 _ = v.Args[1] 30350 v_0 := v.Args[0] 30351 if v_0.Op != OpARM64MOVDconst { 30352 
break 30353 } 30354 c := v_0.AuxInt 30355 x := v.Args[1] 30356 v.reset(OpARM64TSTconst) 30357 v.AuxInt = c 30358 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 30359 v0.AuxInt = d 30360 v0.AddArg(x) 30361 v.AddArg(v0) 30362 return true 30363 } 30364 // match: (TSTshiftRL x (MOVDconst [c]) [d]) 30365 // cond: 30366 // result: (TSTconst x [int64(uint64(c)>>uint64(d))]) 30367 for { 30368 d := v.AuxInt 30369 _ = v.Args[1] 30370 x := v.Args[0] 30371 v_1 := v.Args[1] 30372 if v_1.Op != OpARM64MOVDconst { 30373 break 30374 } 30375 c := v_1.AuxInt 30376 v.reset(OpARM64TSTconst) 30377 v.AuxInt = int64(uint64(c) >> uint64(d)) 30378 v.AddArg(x) 30379 return true 30380 } 30381 return false 30382 } 30383 func rewriteValueARM64_OpARM64UBFIZ_0(v *Value) bool { 30384 // match: (UBFIZ [bfc] (SLLconst [sc] x)) 30385 // cond: sc < getARM64BFwidth(bfc) 30386 // result: (UBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x) 30387 for { 30388 bfc := v.AuxInt 30389 v_0 := v.Args[0] 30390 if v_0.Op != OpARM64SLLconst { 30391 break 30392 } 30393 sc := v_0.AuxInt 30394 x := v_0.Args[0] 30395 if !(sc < getARM64BFwidth(bfc)) { 30396 break 30397 } 30398 v.reset(OpARM64UBFIZ) 30399 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc) 30400 v.AddArg(x) 30401 return true 30402 } 30403 return false 30404 } 30405 func rewriteValueARM64_OpARM64UBFX_0(v *Value) bool { 30406 // match: (UBFX [bfc] (SRLconst [sc] x)) 30407 // cond: sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64 30408 // result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))] x) 30409 for { 30410 bfc := v.AuxInt 30411 v_0 := v.Args[0] 30412 if v_0.Op != OpARM64SRLconst { 30413 break 30414 } 30415 sc := v_0.AuxInt 30416 x := v_0.Args[0] 30417 if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) { 30418 break 30419 } 30420 v.reset(OpARM64UBFX) 30421 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)) 30422 v.AddArg(x) 30423 return true 30424 } 30425 // match: 
(UBFX [bfc] (SLLconst [sc] x)) 30426 // cond: sc == getARM64BFlsb(bfc) 30427 // result: (ANDconst [1<<uint(getARM64BFwidth(bfc))-1] x) 30428 for { 30429 bfc := v.AuxInt 30430 v_0 := v.Args[0] 30431 if v_0.Op != OpARM64SLLconst { 30432 break 30433 } 30434 sc := v_0.AuxInt 30435 x := v_0.Args[0] 30436 if !(sc == getARM64BFlsb(bfc)) { 30437 break 30438 } 30439 v.reset(OpARM64ANDconst) 30440 v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1 30441 v.AddArg(x) 30442 return true 30443 } 30444 // match: (UBFX [bfc] (SLLconst [sc] x)) 30445 // cond: sc < getARM64BFlsb(bfc) 30446 // result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x) 30447 for { 30448 bfc := v.AuxInt 30449 v_0 := v.Args[0] 30450 if v_0.Op != OpARM64SLLconst { 30451 break 30452 } 30453 sc := v_0.AuxInt 30454 x := v_0.Args[0] 30455 if !(sc < getARM64BFlsb(bfc)) { 30456 break 30457 } 30458 v.reset(OpARM64UBFX) 30459 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc)) 30460 v.AddArg(x) 30461 return true 30462 } 30463 // match: (UBFX [bfc] (SLLconst [sc] x)) 30464 // cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc) 30465 // result: (UBFIZ [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x) 30466 for { 30467 bfc := v.AuxInt 30468 v_0 := v.Args[0] 30469 if v_0.Op != OpARM64SLLconst { 30470 break 30471 } 30472 sc := v_0.AuxInt 30473 x := v_0.Args[0] 30474 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) { 30475 break 30476 } 30477 v.reset(OpARM64UBFIZ) 30478 v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc) 30479 v.AddArg(x) 30480 return true 30481 } 30482 return false 30483 } 30484 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 30485 // match: (UDIV x (MOVDconst [1])) 30486 // cond: 30487 // result: x 30488 for { 30489 _ = v.Args[1] 30490 x := v.Args[0] 30491 v_1 := v.Args[1] 30492 if v_1.Op != OpARM64MOVDconst { 30493 break 30494 } 30495 
if v_1.AuxInt != 1 { 30496 break 30497 } 30498 v.reset(OpCopy) 30499 v.Type = x.Type 30500 v.AddArg(x) 30501 return true 30502 } 30503 // match: (UDIV x (MOVDconst [c])) 30504 // cond: isPowerOfTwo(c) 30505 // result: (SRLconst [log2(c)] x) 30506 for { 30507 _ = v.Args[1] 30508 x := v.Args[0] 30509 v_1 := v.Args[1] 30510 if v_1.Op != OpARM64MOVDconst { 30511 break 30512 } 30513 c := v_1.AuxInt 30514 if !(isPowerOfTwo(c)) { 30515 break 30516 } 30517 v.reset(OpARM64SRLconst) 30518 v.AuxInt = log2(c) 30519 v.AddArg(x) 30520 return true 30521 } 30522 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 30523 // cond: 30524 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 30525 for { 30526 _ = v.Args[1] 30527 v_0 := v.Args[0] 30528 if v_0.Op != OpARM64MOVDconst { 30529 break 30530 } 30531 c := v_0.AuxInt 30532 v_1 := v.Args[1] 30533 if v_1.Op != OpARM64MOVDconst { 30534 break 30535 } 30536 d := v_1.AuxInt 30537 v.reset(OpARM64MOVDconst) 30538 v.AuxInt = int64(uint64(c) / uint64(d)) 30539 return true 30540 } 30541 return false 30542 } 30543 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 30544 // match: (UDIVW x (MOVDconst [c])) 30545 // cond: uint32(c)==1 30546 // result: x 30547 for { 30548 _ = v.Args[1] 30549 x := v.Args[0] 30550 v_1 := v.Args[1] 30551 if v_1.Op != OpARM64MOVDconst { 30552 break 30553 } 30554 c := v_1.AuxInt 30555 if !(uint32(c) == 1) { 30556 break 30557 } 30558 v.reset(OpCopy) 30559 v.Type = x.Type 30560 v.AddArg(x) 30561 return true 30562 } 30563 // match: (UDIVW x (MOVDconst [c])) 30564 // cond: isPowerOfTwo(c) && is32Bit(c) 30565 // result: (SRLconst [log2(c)] x) 30566 for { 30567 _ = v.Args[1] 30568 x := v.Args[0] 30569 v_1 := v.Args[1] 30570 if v_1.Op != OpARM64MOVDconst { 30571 break 30572 } 30573 c := v_1.AuxInt 30574 if !(isPowerOfTwo(c) && is32Bit(c)) { 30575 break 30576 } 30577 v.reset(OpARM64SRLconst) 30578 v.AuxInt = log2(c) 30579 v.AddArg(x) 30580 return true 30581 } 30582 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 30583 // 
cond: 30584 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 30585 for { 30586 _ = v.Args[1] 30587 v_0 := v.Args[0] 30588 if v_0.Op != OpARM64MOVDconst { 30589 break 30590 } 30591 c := v_0.AuxInt 30592 v_1 := v.Args[1] 30593 if v_1.Op != OpARM64MOVDconst { 30594 break 30595 } 30596 d := v_1.AuxInt 30597 v.reset(OpARM64MOVDconst) 30598 v.AuxInt = int64(uint32(c) / uint32(d)) 30599 return true 30600 } 30601 return false 30602 } 30603 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 30604 // match: (UMOD _ (MOVDconst [1])) 30605 // cond: 30606 // result: (MOVDconst [0]) 30607 for { 30608 _ = v.Args[1] 30609 v_1 := v.Args[1] 30610 if v_1.Op != OpARM64MOVDconst { 30611 break 30612 } 30613 if v_1.AuxInt != 1 { 30614 break 30615 } 30616 v.reset(OpARM64MOVDconst) 30617 v.AuxInt = 0 30618 return true 30619 } 30620 // match: (UMOD x (MOVDconst [c])) 30621 // cond: isPowerOfTwo(c) 30622 // result: (ANDconst [c-1] x) 30623 for { 30624 _ = v.Args[1] 30625 x := v.Args[0] 30626 v_1 := v.Args[1] 30627 if v_1.Op != OpARM64MOVDconst { 30628 break 30629 } 30630 c := v_1.AuxInt 30631 if !(isPowerOfTwo(c)) { 30632 break 30633 } 30634 v.reset(OpARM64ANDconst) 30635 v.AuxInt = c - 1 30636 v.AddArg(x) 30637 return true 30638 } 30639 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 30640 // cond: 30641 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 30642 for { 30643 _ = v.Args[1] 30644 v_0 := v.Args[0] 30645 if v_0.Op != OpARM64MOVDconst { 30646 break 30647 } 30648 c := v_0.AuxInt 30649 v_1 := v.Args[1] 30650 if v_1.Op != OpARM64MOVDconst { 30651 break 30652 } 30653 d := v_1.AuxInt 30654 v.reset(OpARM64MOVDconst) 30655 v.AuxInt = int64(uint64(c) % uint64(d)) 30656 return true 30657 } 30658 return false 30659 } 30660 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 30661 // match: (UMODW _ (MOVDconst [c])) 30662 // cond: uint32(c)==1 30663 // result: (MOVDconst [0]) 30664 for { 30665 _ = v.Args[1] 30666 v_1 := v.Args[1] 30667 if v_1.Op != OpARM64MOVDconst { 30668 break 
30669 } 30670 c := v_1.AuxInt 30671 if !(uint32(c) == 1) { 30672 break 30673 } 30674 v.reset(OpARM64MOVDconst) 30675 v.AuxInt = 0 30676 return true 30677 } 30678 // match: (UMODW x (MOVDconst [c])) 30679 // cond: isPowerOfTwo(c) && is32Bit(c) 30680 // result: (ANDconst [c-1] x) 30681 for { 30682 _ = v.Args[1] 30683 x := v.Args[0] 30684 v_1 := v.Args[1] 30685 if v_1.Op != OpARM64MOVDconst { 30686 break 30687 } 30688 c := v_1.AuxInt 30689 if !(isPowerOfTwo(c) && is32Bit(c)) { 30690 break 30691 } 30692 v.reset(OpARM64ANDconst) 30693 v.AuxInt = c - 1 30694 v.AddArg(x) 30695 return true 30696 } 30697 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 30698 // cond: 30699 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 30700 for { 30701 _ = v.Args[1] 30702 v_0 := v.Args[0] 30703 if v_0.Op != OpARM64MOVDconst { 30704 break 30705 } 30706 c := v_0.AuxInt 30707 v_1 := v.Args[1] 30708 if v_1.Op != OpARM64MOVDconst { 30709 break 30710 } 30711 d := v_1.AuxInt 30712 v.reset(OpARM64MOVDconst) 30713 v.AuxInt = int64(uint32(c) % uint32(d)) 30714 return true 30715 } 30716 return false 30717 } 30718 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 30719 // match: (XOR x (MOVDconst [c])) 30720 // cond: 30721 // result: (XORconst [c] x) 30722 for { 30723 _ = v.Args[1] 30724 x := v.Args[0] 30725 v_1 := v.Args[1] 30726 if v_1.Op != OpARM64MOVDconst { 30727 break 30728 } 30729 c := v_1.AuxInt 30730 v.reset(OpARM64XORconst) 30731 v.AuxInt = c 30732 v.AddArg(x) 30733 return true 30734 } 30735 // match: (XOR (MOVDconst [c]) x) 30736 // cond: 30737 // result: (XORconst [c] x) 30738 for { 30739 _ = v.Args[1] 30740 v_0 := v.Args[0] 30741 if v_0.Op != OpARM64MOVDconst { 30742 break 30743 } 30744 c := v_0.AuxInt 30745 x := v.Args[1] 30746 v.reset(OpARM64XORconst) 30747 v.AuxInt = c 30748 v.AddArg(x) 30749 return true 30750 } 30751 // match: (XOR x x) 30752 // cond: 30753 // result: (MOVDconst [0]) 30754 for { 30755 _ = v.Args[1] 30756 x := v.Args[0] 30757 if x != v.Args[1] { 30758 break 
30759 } 30760 v.reset(OpARM64MOVDconst) 30761 v.AuxInt = 0 30762 return true 30763 } 30764 // match: (XOR x (MVN y)) 30765 // cond: 30766 // result: (EON x y) 30767 for { 30768 _ = v.Args[1] 30769 x := v.Args[0] 30770 v_1 := v.Args[1] 30771 if v_1.Op != OpARM64MVN { 30772 break 30773 } 30774 y := v_1.Args[0] 30775 v.reset(OpARM64EON) 30776 v.AddArg(x) 30777 v.AddArg(y) 30778 return true 30779 } 30780 // match: (XOR (MVN y) x) 30781 // cond: 30782 // result: (EON x y) 30783 for { 30784 _ = v.Args[1] 30785 v_0 := v.Args[0] 30786 if v_0.Op != OpARM64MVN { 30787 break 30788 } 30789 y := v_0.Args[0] 30790 x := v.Args[1] 30791 v.reset(OpARM64EON) 30792 v.AddArg(x) 30793 v.AddArg(y) 30794 return true 30795 } 30796 // match: (XOR x0 x1:(SLLconst [c] y)) 30797 // cond: clobberIfDead(x1) 30798 // result: (XORshiftLL x0 y [c]) 30799 for { 30800 _ = v.Args[1] 30801 x0 := v.Args[0] 30802 x1 := v.Args[1] 30803 if x1.Op != OpARM64SLLconst { 30804 break 30805 } 30806 c := x1.AuxInt 30807 y := x1.Args[0] 30808 if !(clobberIfDead(x1)) { 30809 break 30810 } 30811 v.reset(OpARM64XORshiftLL) 30812 v.AuxInt = c 30813 v.AddArg(x0) 30814 v.AddArg(y) 30815 return true 30816 } 30817 // match: (XOR x1:(SLLconst [c] y) x0) 30818 // cond: clobberIfDead(x1) 30819 // result: (XORshiftLL x0 y [c]) 30820 for { 30821 _ = v.Args[1] 30822 x1 := v.Args[0] 30823 if x1.Op != OpARM64SLLconst { 30824 break 30825 } 30826 c := x1.AuxInt 30827 y := x1.Args[0] 30828 x0 := v.Args[1] 30829 if !(clobberIfDead(x1)) { 30830 break 30831 } 30832 v.reset(OpARM64XORshiftLL) 30833 v.AuxInt = c 30834 v.AddArg(x0) 30835 v.AddArg(y) 30836 return true 30837 } 30838 // match: (XOR x0 x1:(SRLconst [c] y)) 30839 // cond: clobberIfDead(x1) 30840 // result: (XORshiftRL x0 y [c]) 30841 for { 30842 _ = v.Args[1] 30843 x0 := v.Args[0] 30844 x1 := v.Args[1] 30845 if x1.Op != OpARM64SRLconst { 30846 break 30847 } 30848 c := x1.AuxInt 30849 y := x1.Args[0] 30850 if !(clobberIfDead(x1)) { 30851 break 30852 } 30853 
v.reset(OpARM64XORshiftRL) 30854 v.AuxInt = c 30855 v.AddArg(x0) 30856 v.AddArg(y) 30857 return true 30858 } 30859 // match: (XOR x1:(SRLconst [c] y) x0) 30860 // cond: clobberIfDead(x1) 30861 // result: (XORshiftRL x0 y [c]) 30862 for { 30863 _ = v.Args[1] 30864 x1 := v.Args[0] 30865 if x1.Op != OpARM64SRLconst { 30866 break 30867 } 30868 c := x1.AuxInt 30869 y := x1.Args[0] 30870 x0 := v.Args[1] 30871 if !(clobberIfDead(x1)) { 30872 break 30873 } 30874 v.reset(OpARM64XORshiftRL) 30875 v.AuxInt = c 30876 v.AddArg(x0) 30877 v.AddArg(y) 30878 return true 30879 } 30880 // match: (XOR x0 x1:(SRAconst [c] y)) 30881 // cond: clobberIfDead(x1) 30882 // result: (XORshiftRA x0 y [c]) 30883 for { 30884 _ = v.Args[1] 30885 x0 := v.Args[0] 30886 x1 := v.Args[1] 30887 if x1.Op != OpARM64SRAconst { 30888 break 30889 } 30890 c := x1.AuxInt 30891 y := x1.Args[0] 30892 if !(clobberIfDead(x1)) { 30893 break 30894 } 30895 v.reset(OpARM64XORshiftRA) 30896 v.AuxInt = c 30897 v.AddArg(x0) 30898 v.AddArg(y) 30899 return true 30900 } 30901 return false 30902 } 30903 func rewriteValueARM64_OpARM64XOR_10(v *Value) bool { 30904 b := v.Block 30905 _ = b 30906 typ := &b.Func.Config.Types 30907 _ = typ 30908 // match: (XOR x1:(SRAconst [c] y) x0) 30909 // cond: clobberIfDead(x1) 30910 // result: (XORshiftRA x0 y [c]) 30911 for { 30912 _ = v.Args[1] 30913 x1 := v.Args[0] 30914 if x1.Op != OpARM64SRAconst { 30915 break 30916 } 30917 c := x1.AuxInt 30918 y := x1.Args[0] 30919 x0 := v.Args[1] 30920 if !(clobberIfDead(x1)) { 30921 break 30922 } 30923 v.reset(OpARM64XORshiftRA) 30924 v.AuxInt = c 30925 v.AddArg(x0) 30926 v.AddArg(y) 30927 return true 30928 } 30929 // match: (XOR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 30930 // cond: cc.(Op) == OpARM64LessThanU 30931 // result: (ROR x (NEG <t> y)) 30932 for { 30933 _ = v.Args[1] 30934 v_0 := v.Args[0] 
30935 if v_0.Op != OpARM64SLL { 30936 break 30937 } 30938 _ = v_0.Args[1] 30939 x := v_0.Args[0] 30940 v_0_1 := v_0.Args[1] 30941 if v_0_1.Op != OpARM64ANDconst { 30942 break 30943 } 30944 t := v_0_1.Type 30945 if v_0_1.AuxInt != 63 { 30946 break 30947 } 30948 y := v_0_1.Args[0] 30949 v_1 := v.Args[1] 30950 if v_1.Op != OpARM64CSEL0 { 30951 break 30952 } 30953 if v_1.Type != typ.UInt64 { 30954 break 30955 } 30956 cc := v_1.Aux 30957 _ = v_1.Args[1] 30958 v_1_0 := v_1.Args[0] 30959 if v_1_0.Op != OpARM64SRL { 30960 break 30961 } 30962 if v_1_0.Type != typ.UInt64 { 30963 break 30964 } 30965 _ = v_1_0.Args[1] 30966 if x != v_1_0.Args[0] { 30967 break 30968 } 30969 v_1_0_1 := v_1_0.Args[1] 30970 if v_1_0_1.Op != OpARM64SUB { 30971 break 30972 } 30973 if v_1_0_1.Type != t { 30974 break 30975 } 30976 _ = v_1_0_1.Args[1] 30977 v_1_0_1_0 := v_1_0_1.Args[0] 30978 if v_1_0_1_0.Op != OpARM64MOVDconst { 30979 break 30980 } 30981 if v_1_0_1_0.AuxInt != 64 { 30982 break 30983 } 30984 v_1_0_1_1 := v_1_0_1.Args[1] 30985 if v_1_0_1_1.Op != OpARM64ANDconst { 30986 break 30987 } 30988 if v_1_0_1_1.Type != t { 30989 break 30990 } 30991 if v_1_0_1_1.AuxInt != 63 { 30992 break 30993 } 30994 if y != v_1_0_1_1.Args[0] { 30995 break 30996 } 30997 v_1_1 := v_1.Args[1] 30998 if v_1_1.Op != OpARM64CMPconst { 30999 break 31000 } 31001 if v_1_1.AuxInt != 64 { 31002 break 31003 } 31004 v_1_1_0 := v_1_1.Args[0] 31005 if v_1_1_0.Op != OpARM64SUB { 31006 break 31007 } 31008 if v_1_1_0.Type != t { 31009 break 31010 } 31011 _ = v_1_1_0.Args[1] 31012 v_1_1_0_0 := v_1_1_0.Args[0] 31013 if v_1_1_0_0.Op != OpARM64MOVDconst { 31014 break 31015 } 31016 if v_1_1_0_0.AuxInt != 64 { 31017 break 31018 } 31019 v_1_1_0_1 := v_1_1_0.Args[1] 31020 if v_1_1_0_1.Op != OpARM64ANDconst { 31021 break 31022 } 31023 if v_1_1_0_1.Type != t { 31024 break 31025 } 31026 if v_1_1_0_1.AuxInt != 63 { 31027 break 31028 } 31029 if y != v_1_1_0_1.Args[0] { 31030 break 31031 } 31032 if !(cc.(Op) == OpARM64LessThanU) { 31033 break 
31034 } 31035 v.reset(OpARM64ROR) 31036 v.AddArg(x) 31037 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31038 v0.AddArg(y) 31039 v.AddArg(v0) 31040 return true 31041 } 31042 // match: (XOR (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 31043 // cond: cc.(Op) == OpARM64LessThanU 31044 // result: (ROR x (NEG <t> y)) 31045 for { 31046 _ = v.Args[1] 31047 v_0 := v.Args[0] 31048 if v_0.Op != OpARM64CSEL0 { 31049 break 31050 } 31051 if v_0.Type != typ.UInt64 { 31052 break 31053 } 31054 cc := v_0.Aux 31055 _ = v_0.Args[1] 31056 v_0_0 := v_0.Args[0] 31057 if v_0_0.Op != OpARM64SRL { 31058 break 31059 } 31060 if v_0_0.Type != typ.UInt64 { 31061 break 31062 } 31063 _ = v_0_0.Args[1] 31064 x := v_0_0.Args[0] 31065 v_0_0_1 := v_0_0.Args[1] 31066 if v_0_0_1.Op != OpARM64SUB { 31067 break 31068 } 31069 t := v_0_0_1.Type 31070 _ = v_0_0_1.Args[1] 31071 v_0_0_1_0 := v_0_0_1.Args[0] 31072 if v_0_0_1_0.Op != OpARM64MOVDconst { 31073 break 31074 } 31075 if v_0_0_1_0.AuxInt != 64 { 31076 break 31077 } 31078 v_0_0_1_1 := v_0_0_1.Args[1] 31079 if v_0_0_1_1.Op != OpARM64ANDconst { 31080 break 31081 } 31082 if v_0_0_1_1.Type != t { 31083 break 31084 } 31085 if v_0_0_1_1.AuxInt != 63 { 31086 break 31087 } 31088 y := v_0_0_1_1.Args[0] 31089 v_0_1 := v_0.Args[1] 31090 if v_0_1.Op != OpARM64CMPconst { 31091 break 31092 } 31093 if v_0_1.AuxInt != 64 { 31094 break 31095 } 31096 v_0_1_0 := v_0_1.Args[0] 31097 if v_0_1_0.Op != OpARM64SUB { 31098 break 31099 } 31100 if v_0_1_0.Type != t { 31101 break 31102 } 31103 _ = v_0_1_0.Args[1] 31104 v_0_1_0_0 := v_0_1_0.Args[0] 31105 if v_0_1_0_0.Op != OpARM64MOVDconst { 31106 break 31107 } 31108 if v_0_1_0_0.AuxInt != 64 { 31109 break 31110 } 31111 v_0_1_0_1 := v_0_1_0.Args[1] 31112 if v_0_1_0_1.Op != OpARM64ANDconst { 31113 break 31114 } 31115 if v_0_1_0_1.Type != t { 31116 break 31117 } 31118 if v_0_1_0_1.AuxInt != 63 
{ 31119 break 31120 } 31121 if y != v_0_1_0_1.Args[0] { 31122 break 31123 } 31124 v_1 := v.Args[1] 31125 if v_1.Op != OpARM64SLL { 31126 break 31127 } 31128 _ = v_1.Args[1] 31129 if x != v_1.Args[0] { 31130 break 31131 } 31132 v_1_1 := v_1.Args[1] 31133 if v_1_1.Op != OpARM64ANDconst { 31134 break 31135 } 31136 if v_1_1.Type != t { 31137 break 31138 } 31139 if v_1_1.AuxInt != 63 { 31140 break 31141 } 31142 if y != v_1_1.Args[0] { 31143 break 31144 } 31145 if !(cc.(Op) == OpARM64LessThanU) { 31146 break 31147 } 31148 v.reset(OpARM64ROR) 31149 v.AddArg(x) 31150 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31151 v0.AddArg(y) 31152 v.AddArg(v0) 31153 return true 31154 } 31155 // match: (XOR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 31156 // cond: cc.(Op) == OpARM64LessThanU 31157 // result: (ROR x y) 31158 for { 31159 _ = v.Args[1] 31160 v_0 := v.Args[0] 31161 if v_0.Op != OpARM64SRL { 31162 break 31163 } 31164 if v_0.Type != typ.UInt64 { 31165 break 31166 } 31167 _ = v_0.Args[1] 31168 x := v_0.Args[0] 31169 v_0_1 := v_0.Args[1] 31170 if v_0_1.Op != OpARM64ANDconst { 31171 break 31172 } 31173 t := v_0_1.Type 31174 if v_0_1.AuxInt != 63 { 31175 break 31176 } 31177 y := v_0_1.Args[0] 31178 v_1 := v.Args[1] 31179 if v_1.Op != OpARM64CSEL0 { 31180 break 31181 } 31182 if v_1.Type != typ.UInt64 { 31183 break 31184 } 31185 cc := v_1.Aux 31186 _ = v_1.Args[1] 31187 v_1_0 := v_1.Args[0] 31188 if v_1_0.Op != OpARM64SLL { 31189 break 31190 } 31191 _ = v_1_0.Args[1] 31192 if x != v_1_0.Args[0] { 31193 break 31194 } 31195 v_1_0_1 := v_1_0.Args[1] 31196 if v_1_0_1.Op != OpARM64SUB { 31197 break 31198 } 31199 if v_1_0_1.Type != t { 31200 break 31201 } 31202 _ = v_1_0_1.Args[1] 31203 v_1_0_1_0 := v_1_0_1.Args[0] 31204 if v_1_0_1_0.Op != OpARM64MOVDconst { 31205 break 31206 } 31207 if v_1_0_1_0.AuxInt != 64 { 31208 break 31209 } 31210 
v_1_0_1_1 := v_1_0_1.Args[1] 31211 if v_1_0_1_1.Op != OpARM64ANDconst { 31212 break 31213 } 31214 if v_1_0_1_1.Type != t { 31215 break 31216 } 31217 if v_1_0_1_1.AuxInt != 63 { 31218 break 31219 } 31220 if y != v_1_0_1_1.Args[0] { 31221 break 31222 } 31223 v_1_1 := v_1.Args[1] 31224 if v_1_1.Op != OpARM64CMPconst { 31225 break 31226 } 31227 if v_1_1.AuxInt != 64 { 31228 break 31229 } 31230 v_1_1_0 := v_1_1.Args[0] 31231 if v_1_1_0.Op != OpARM64SUB { 31232 break 31233 } 31234 if v_1_1_0.Type != t { 31235 break 31236 } 31237 _ = v_1_1_0.Args[1] 31238 v_1_1_0_0 := v_1_1_0.Args[0] 31239 if v_1_1_0_0.Op != OpARM64MOVDconst { 31240 break 31241 } 31242 if v_1_1_0_0.AuxInt != 64 { 31243 break 31244 } 31245 v_1_1_0_1 := v_1_1_0.Args[1] 31246 if v_1_1_0_1.Op != OpARM64ANDconst { 31247 break 31248 } 31249 if v_1_1_0_1.Type != t { 31250 break 31251 } 31252 if v_1_1_0_1.AuxInt != 63 { 31253 break 31254 } 31255 if y != v_1_1_0_1.Args[0] { 31256 break 31257 } 31258 if !(cc.(Op) == OpARM64LessThanU) { 31259 break 31260 } 31261 v.reset(OpARM64ROR) 31262 v.AddArg(x) 31263 v.AddArg(y) 31264 return true 31265 } 31266 // match: (XOR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 31267 // cond: cc.(Op) == OpARM64LessThanU 31268 // result: (ROR x y) 31269 for { 31270 _ = v.Args[1] 31271 v_0 := v.Args[0] 31272 if v_0.Op != OpARM64CSEL0 { 31273 break 31274 } 31275 if v_0.Type != typ.UInt64 { 31276 break 31277 } 31278 cc := v_0.Aux 31279 _ = v_0.Args[1] 31280 v_0_0 := v_0.Args[0] 31281 if v_0_0.Op != OpARM64SLL { 31282 break 31283 } 31284 _ = v_0_0.Args[1] 31285 x := v_0_0.Args[0] 31286 v_0_0_1 := v_0_0.Args[1] 31287 if v_0_0_1.Op != OpARM64SUB { 31288 break 31289 } 31290 t := v_0_0_1.Type 31291 _ = v_0_0_1.Args[1] 31292 v_0_0_1_0 := v_0_0_1.Args[0] 31293 if v_0_0_1_0.Op != OpARM64MOVDconst { 31294 break 31295 } 31296 if v_0_0_1_0.AuxInt != 64 { 
31297 break 31298 } 31299 v_0_0_1_1 := v_0_0_1.Args[1] 31300 if v_0_0_1_1.Op != OpARM64ANDconst { 31301 break 31302 } 31303 if v_0_0_1_1.Type != t { 31304 break 31305 } 31306 if v_0_0_1_1.AuxInt != 63 { 31307 break 31308 } 31309 y := v_0_0_1_1.Args[0] 31310 v_0_1 := v_0.Args[1] 31311 if v_0_1.Op != OpARM64CMPconst { 31312 break 31313 } 31314 if v_0_1.AuxInt != 64 { 31315 break 31316 } 31317 v_0_1_0 := v_0_1.Args[0] 31318 if v_0_1_0.Op != OpARM64SUB { 31319 break 31320 } 31321 if v_0_1_0.Type != t { 31322 break 31323 } 31324 _ = v_0_1_0.Args[1] 31325 v_0_1_0_0 := v_0_1_0.Args[0] 31326 if v_0_1_0_0.Op != OpARM64MOVDconst { 31327 break 31328 } 31329 if v_0_1_0_0.AuxInt != 64 { 31330 break 31331 } 31332 v_0_1_0_1 := v_0_1_0.Args[1] 31333 if v_0_1_0_1.Op != OpARM64ANDconst { 31334 break 31335 } 31336 if v_0_1_0_1.Type != t { 31337 break 31338 } 31339 if v_0_1_0_1.AuxInt != 63 { 31340 break 31341 } 31342 if y != v_0_1_0_1.Args[0] { 31343 break 31344 } 31345 v_1 := v.Args[1] 31346 if v_1.Op != OpARM64SRL { 31347 break 31348 } 31349 if v_1.Type != typ.UInt64 { 31350 break 31351 } 31352 _ = v_1.Args[1] 31353 if x != v_1.Args[0] { 31354 break 31355 } 31356 v_1_1 := v_1.Args[1] 31357 if v_1_1.Op != OpARM64ANDconst { 31358 break 31359 } 31360 if v_1_1.Type != t { 31361 break 31362 } 31363 if v_1_1.AuxInt != 63 { 31364 break 31365 } 31366 if y != v_1_1.Args[0] { 31367 break 31368 } 31369 if !(cc.(Op) == OpARM64LessThanU) { 31370 break 31371 } 31372 v.reset(OpARM64ROR) 31373 v.AddArg(x) 31374 v.AddArg(y) 31375 return true 31376 } 31377 // match: (XOR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 31378 // cond: cc.(Op) == OpARM64LessThanU 31379 // result: (RORW x (NEG <t> y)) 31380 for { 31381 _ = v.Args[1] 31382 v_0 := v.Args[0] 31383 if v_0.Op != OpARM64SLL { 31384 break 31385 } 31386 _ = v_0.Args[1] 31387 x := v_0.Args[0] 
31388 v_0_1 := v_0.Args[1] 31389 if v_0_1.Op != OpARM64ANDconst { 31390 break 31391 } 31392 t := v_0_1.Type 31393 if v_0_1.AuxInt != 31 { 31394 break 31395 } 31396 y := v_0_1.Args[0] 31397 v_1 := v.Args[1] 31398 if v_1.Op != OpARM64CSEL0 { 31399 break 31400 } 31401 if v_1.Type != typ.UInt32 { 31402 break 31403 } 31404 cc := v_1.Aux 31405 _ = v_1.Args[1] 31406 v_1_0 := v_1.Args[0] 31407 if v_1_0.Op != OpARM64SRL { 31408 break 31409 } 31410 if v_1_0.Type != typ.UInt32 { 31411 break 31412 } 31413 _ = v_1_0.Args[1] 31414 v_1_0_0 := v_1_0.Args[0] 31415 if v_1_0_0.Op != OpARM64MOVWUreg { 31416 break 31417 } 31418 if x != v_1_0_0.Args[0] { 31419 break 31420 } 31421 v_1_0_1 := v_1_0.Args[1] 31422 if v_1_0_1.Op != OpARM64SUB { 31423 break 31424 } 31425 if v_1_0_1.Type != t { 31426 break 31427 } 31428 _ = v_1_0_1.Args[1] 31429 v_1_0_1_0 := v_1_0_1.Args[0] 31430 if v_1_0_1_0.Op != OpARM64MOVDconst { 31431 break 31432 } 31433 if v_1_0_1_0.AuxInt != 32 { 31434 break 31435 } 31436 v_1_0_1_1 := v_1_0_1.Args[1] 31437 if v_1_0_1_1.Op != OpARM64ANDconst { 31438 break 31439 } 31440 if v_1_0_1_1.Type != t { 31441 break 31442 } 31443 if v_1_0_1_1.AuxInt != 31 { 31444 break 31445 } 31446 if y != v_1_0_1_1.Args[0] { 31447 break 31448 } 31449 v_1_1 := v_1.Args[1] 31450 if v_1_1.Op != OpARM64CMPconst { 31451 break 31452 } 31453 if v_1_1.AuxInt != 64 { 31454 break 31455 } 31456 v_1_1_0 := v_1_1.Args[0] 31457 if v_1_1_0.Op != OpARM64SUB { 31458 break 31459 } 31460 if v_1_1_0.Type != t { 31461 break 31462 } 31463 _ = v_1_1_0.Args[1] 31464 v_1_1_0_0 := v_1_1_0.Args[0] 31465 if v_1_1_0_0.Op != OpARM64MOVDconst { 31466 break 31467 } 31468 if v_1_1_0_0.AuxInt != 32 { 31469 break 31470 } 31471 v_1_1_0_1 := v_1_1_0.Args[1] 31472 if v_1_1_0_1.Op != OpARM64ANDconst { 31473 break 31474 } 31475 if v_1_1_0_1.Type != t { 31476 break 31477 } 31478 if v_1_1_0_1.AuxInt != 31 { 31479 break 31480 } 31481 if y != v_1_1_0_1.Args[0] { 31482 break 31483 } 31484 if !(cc.(Op) == OpARM64LessThanU) { 31485 break 
31486 } 31487 v.reset(OpARM64RORW) 31488 v.AddArg(x) 31489 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31490 v0.AddArg(y) 31491 v.AddArg(v0) 31492 return true 31493 } 31494 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 31495 // cond: cc.(Op) == OpARM64LessThanU 31496 // result: (RORW x (NEG <t> y)) 31497 for { 31498 _ = v.Args[1] 31499 v_0 := v.Args[0] 31500 if v_0.Op != OpARM64CSEL0 { 31501 break 31502 } 31503 if v_0.Type != typ.UInt32 { 31504 break 31505 } 31506 cc := v_0.Aux 31507 _ = v_0.Args[1] 31508 v_0_0 := v_0.Args[0] 31509 if v_0_0.Op != OpARM64SRL { 31510 break 31511 } 31512 if v_0_0.Type != typ.UInt32 { 31513 break 31514 } 31515 _ = v_0_0.Args[1] 31516 v_0_0_0 := v_0_0.Args[0] 31517 if v_0_0_0.Op != OpARM64MOVWUreg { 31518 break 31519 } 31520 x := v_0_0_0.Args[0] 31521 v_0_0_1 := v_0_0.Args[1] 31522 if v_0_0_1.Op != OpARM64SUB { 31523 break 31524 } 31525 t := v_0_0_1.Type 31526 _ = v_0_0_1.Args[1] 31527 v_0_0_1_0 := v_0_0_1.Args[0] 31528 if v_0_0_1_0.Op != OpARM64MOVDconst { 31529 break 31530 } 31531 if v_0_0_1_0.AuxInt != 32 { 31532 break 31533 } 31534 v_0_0_1_1 := v_0_0_1.Args[1] 31535 if v_0_0_1_1.Op != OpARM64ANDconst { 31536 break 31537 } 31538 if v_0_0_1_1.Type != t { 31539 break 31540 } 31541 if v_0_0_1_1.AuxInt != 31 { 31542 break 31543 } 31544 y := v_0_0_1_1.Args[0] 31545 v_0_1 := v_0.Args[1] 31546 if v_0_1.Op != OpARM64CMPconst { 31547 break 31548 } 31549 if v_0_1.AuxInt != 64 { 31550 break 31551 } 31552 v_0_1_0 := v_0_1.Args[0] 31553 if v_0_1_0.Op != OpARM64SUB { 31554 break 31555 } 31556 if v_0_1_0.Type != t { 31557 break 31558 } 31559 _ = v_0_1_0.Args[1] 31560 v_0_1_0_0 := v_0_1_0.Args[0] 31561 if v_0_1_0_0.Op != OpARM64MOVDconst { 31562 break 31563 } 31564 if v_0_1_0_0.AuxInt != 32 { 31565 break 31566 } 31567 v_0_1_0_1 := v_0_1_0.Args[1] 31568 if v_0_1_0_1.Op != 
OpARM64ANDconst { 31569 break 31570 } 31571 if v_0_1_0_1.Type != t { 31572 break 31573 } 31574 if v_0_1_0_1.AuxInt != 31 { 31575 break 31576 } 31577 if y != v_0_1_0_1.Args[0] { 31578 break 31579 } 31580 v_1 := v.Args[1] 31581 if v_1.Op != OpARM64SLL { 31582 break 31583 } 31584 _ = v_1.Args[1] 31585 if x != v_1.Args[0] { 31586 break 31587 } 31588 v_1_1 := v_1.Args[1] 31589 if v_1_1.Op != OpARM64ANDconst { 31590 break 31591 } 31592 if v_1_1.Type != t { 31593 break 31594 } 31595 if v_1_1.AuxInt != 31 { 31596 break 31597 } 31598 if y != v_1_1.Args[0] { 31599 break 31600 } 31601 if !(cc.(Op) == OpARM64LessThanU) { 31602 break 31603 } 31604 v.reset(OpARM64RORW) 31605 v.AddArg(x) 31606 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31607 v0.AddArg(y) 31608 v.AddArg(v0) 31609 return true 31610 } 31611 // match: (XOR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 31612 // cond: cc.(Op) == OpARM64LessThanU 31613 // result: (RORW x y) 31614 for { 31615 _ = v.Args[1] 31616 v_0 := v.Args[0] 31617 if v_0.Op != OpARM64SRL { 31618 break 31619 } 31620 if v_0.Type != typ.UInt32 { 31621 break 31622 } 31623 _ = v_0.Args[1] 31624 v_0_0 := v_0.Args[0] 31625 if v_0_0.Op != OpARM64MOVWUreg { 31626 break 31627 } 31628 x := v_0_0.Args[0] 31629 v_0_1 := v_0.Args[1] 31630 if v_0_1.Op != OpARM64ANDconst { 31631 break 31632 } 31633 t := v_0_1.Type 31634 if v_0_1.AuxInt != 31 { 31635 break 31636 } 31637 y := v_0_1.Args[0] 31638 v_1 := v.Args[1] 31639 if v_1.Op != OpARM64CSEL0 { 31640 break 31641 } 31642 if v_1.Type != typ.UInt32 { 31643 break 31644 } 31645 cc := v_1.Aux 31646 _ = v_1.Args[1] 31647 v_1_0 := v_1.Args[0] 31648 if v_1_0.Op != OpARM64SLL { 31649 break 31650 } 31651 _ = v_1_0.Args[1] 31652 if x != v_1_0.Args[0] { 31653 break 31654 } 31655 v_1_0_1 := v_1_0.Args[1] 31656 if v_1_0_1.Op != OpARM64SUB { 31657 break 31658 } 31659 if 
v_1_0_1.Type != t { 31660 break 31661 } 31662 _ = v_1_0_1.Args[1] 31663 v_1_0_1_0 := v_1_0_1.Args[0] 31664 if v_1_0_1_0.Op != OpARM64MOVDconst { 31665 break 31666 } 31667 if v_1_0_1_0.AuxInt != 32 { 31668 break 31669 } 31670 v_1_0_1_1 := v_1_0_1.Args[1] 31671 if v_1_0_1_1.Op != OpARM64ANDconst { 31672 break 31673 } 31674 if v_1_0_1_1.Type != t { 31675 break 31676 } 31677 if v_1_0_1_1.AuxInt != 31 { 31678 break 31679 } 31680 if y != v_1_0_1_1.Args[0] { 31681 break 31682 } 31683 v_1_1 := v_1.Args[1] 31684 if v_1_1.Op != OpARM64CMPconst { 31685 break 31686 } 31687 if v_1_1.AuxInt != 64 { 31688 break 31689 } 31690 v_1_1_0 := v_1_1.Args[0] 31691 if v_1_1_0.Op != OpARM64SUB { 31692 break 31693 } 31694 if v_1_1_0.Type != t { 31695 break 31696 } 31697 _ = v_1_1_0.Args[1] 31698 v_1_1_0_0 := v_1_1_0.Args[0] 31699 if v_1_1_0_0.Op != OpARM64MOVDconst { 31700 break 31701 } 31702 if v_1_1_0_0.AuxInt != 32 { 31703 break 31704 } 31705 v_1_1_0_1 := v_1_1_0.Args[1] 31706 if v_1_1_0_1.Op != OpARM64ANDconst { 31707 break 31708 } 31709 if v_1_1_0_1.Type != t { 31710 break 31711 } 31712 if v_1_1_0_1.AuxInt != 31 { 31713 break 31714 } 31715 if y != v_1_1_0_1.Args[0] { 31716 break 31717 } 31718 if !(cc.(Op) == OpARM64LessThanU) { 31719 break 31720 } 31721 v.reset(OpARM64RORW) 31722 v.AddArg(x) 31723 v.AddArg(y) 31724 return true 31725 } 31726 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 31727 // cond: cc.(Op) == OpARM64LessThanU 31728 // result: (RORW x y) 31729 for { 31730 _ = v.Args[1] 31731 v_0 := v.Args[0] 31732 if v_0.Op != OpARM64CSEL0 { 31733 break 31734 } 31735 if v_0.Type != typ.UInt32 { 31736 break 31737 } 31738 cc := v_0.Aux 31739 _ = v_0.Args[1] 31740 v_0_0 := v_0.Args[0] 31741 if v_0_0.Op != OpARM64SLL { 31742 break 31743 } 31744 _ = v_0_0.Args[1] 31745 x := v_0_0.Args[0] 31746 v_0_0_1 := v_0_0.Args[1] 
// rewriteValueARM64_OpARM64XORconst_0 simplifies XORconst: identity (x^0),
// complement (x^-1 -> MVN), and folding of constant/nested-XORconst operands.
// NOTE(review): this file is generated from gen/ARM64.rules — rule changes
// belong in the .rules source, not here.
func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool {
	// match: (XORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (XORconst [-1] x)
	// cond:
	// result: (MVN x)
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
	// match: (XORconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c^d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c ^ d
		return true
	}
	// match: (XORconst [c] (XORconst [d] x))
	// cond:
	// result: (XORconst [c^d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64XORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64XORconst)
		v.AuxInt = c ^ d
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftLL_0 rewrites XOR-with-shifted-operand:
// folds constant operands, cancels x^(x<<c) self-XOR at equal shifts, and
// recognizes rotate (ROR/RORWconst) and extract (EXTRconst/EXTRWconst) forms.
func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL <t> [c] (UBFX [bfc] x) x)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL [c] (SRLconst x [64-c]) x2)
	// cond:
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v.Args[1]
		v.reset(OpARM64EXTRconst)
		v.AuxInt = 64 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		x2 := v.Args[1]
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = 32 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftRA_0 rewrites XOR with an arithmetic-
// right-shifted operand: folds constant operands and cancels the self-XOR
// x ^ (x>>c) when the explicit and implicit shift amounts agree.
func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64XORshiftRL_0 rewrites XOR with a logical-right-
// shifted operand: constant folding, self-XOR cancellation, and recognition
// of 64-bit (RORconst) and 32-bit (RORWconst) rotate patterns.
func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (XORshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (XORconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64XORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (XORshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (XORshiftRL [c] (SLLconst x [64-c]) x)
	// cond:
	// result: (RORconst [ c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpAbs_0 lowers the generic Abs op to ARM64 FABSD.
func rewriteValueARM64_OpAbs_0(v *Value) bool {
	// match: (Abs x)
	// cond:
	// result: (FABSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FABSD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpAdd16_0 lowers the generic Add16 op to ARM64 ADD.
func rewriteValueARM64_OpAdd16_0(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd32_0 lowers the generic Add32 op to ARM64 ADD.
func rewriteValueARM64_OpAdd32_0(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd32F_0 lowers the generic Add32F op to ARM64 FADDS.
func rewriteValueARM64_OpAdd32F_0(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (FADDS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
// rewriteValueARM64_OpAdd64F_0 lowers the generic Add64F op to ARM64 FADDD.
func rewriteValueARM64_OpAdd64F_0(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (FADDD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAdd8_0 lowers the generic Add8 op to ARM64 ADD.
func rewriteValueARM64_OpAdd8_0(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAddPtr_0 lowers the generic AddPtr op to ARM64 ADD.
func rewriteValueARM64_OpAddPtr_0(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAddr_0 lowers the generic Addr op to ARM64 MOVDaddr,
// carrying the symbol through in Aux.
func rewriteValueARM64_OpAddr_0(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVDaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}

// rewriteValueARM64_OpAnd16_0 lowers the generic And16 op to ARM64 AND.
func rewriteValueARM64_OpAnd16_0(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd32_0 lowers the generic And32 op to ARM64 AND.
func rewriteValueARM64_OpAnd32_0(v *Value) bool {
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd64_0 lowers the generic And64 op to ARM64 AND.
func rewriteValueARM64_OpAnd64_0(v *Value) bool {
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAnd8_0 lowers the generic And8 op to ARM64 AND.
func rewriteValueARM64_OpAnd8_0(v *Value) bool {
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAndB_0 lowers the generic AndB op to ARM64 AND.
func rewriteValueARM64_OpAndB_0(v *Value) bool {
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpAtomicAdd32_0 lowers AtomicAdd32 to LoweredAtomicAdd32.
func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool {
	// match: (AtomicAdd32 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAdd32Variant_0 lowers AtomicAdd32Variant to
// LoweredAtomicAdd32Variant.
func rewriteValueARM64_OpAtomicAdd32Variant_0(v *Value) bool {
	// match: (AtomicAdd32Variant ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd32Variant ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd32Variant)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAdd64_0 lowers AtomicAdd64 to LoweredAtomicAdd64.
func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool {
	// match: (AtomicAdd64 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAdd64Variant_0 lowers AtomicAdd64Variant to
// LoweredAtomicAdd64Variant.
func rewriteValueARM64_OpAtomicAdd64Variant_0(v *Value) bool {
	// match: (AtomicAdd64Variant ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd64Variant ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicAdd64Variant)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicAnd8_0 lowers AtomicAnd8 to the memory result
// (Select1) of LoweredAtomicAnd8, which produces a (UInt8, Mem) tuple.
func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (AtomicAnd8 ptr val mem)
	// cond:
	// result: (Select1 (LoweredAtomicAnd8 ptr val mem))
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg(ptr)
		v0.AddArg(val)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicCompareAndSwap32_0 lowers AtomicCompareAndSwap32
// to LoweredAtomicCas32.
func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool {
	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas32 ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64LoweredAtomicCas32)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}
// rewriteValueARM64_OpAtomicExchange32_0 lowers AtomicExchange32 to
// LoweredAtomicExchange32.
func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool {
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicExchange64_0 lowers AtomicExchange64 to
// LoweredAtomicExchange64.
func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool {
	// match: (AtomicExchange64 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoad32_0 lowers AtomicLoad32 to LDARW.
func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LDARW ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDARW)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoad64_0 lowers AtomicLoad64 to LDAR.
func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LDAR ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicLoadPtr_0 lowers AtomicLoadPtr to LDAR.
func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LDAR ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicOr8_0 lowers AtomicOr8 to the memory result
// (Select1) of LoweredAtomicOr8, which produces a (UInt8, Mem) tuple.
func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (AtomicOr8 ptr val mem)
	// cond:
	// result: (Select1 (LoweredAtomicOr8 ptr val mem))
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg(ptr)
		v0.AddArg(val)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpAtomicStore32_0 lowers AtomicStore32 to STLRW.
func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (STLRW ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLRW)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicStore64_0 lowers AtomicStore64 to STLR.
func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (STLR ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAtomicStorePtrNoWB_0 lowers AtomicStorePtrNoWB to STLR.
func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (STLR ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueARM64_OpAvg64u_0 lowers Avg64u as y + (x-y)>>1, which avoids
// overflow of the intermediate sum.
func rewriteValueARM64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}

// rewriteValueARM64_OpBitLen64_0 lowers BitLen64 as 64 - CLZ(x).
func rewriteValueARM64_OpBitLen64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitLen64 x)
	// cond:
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 64
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueARM64_OpBitRev16_0 lowers BitRev16 as a full 64-bit RBIT
// followed by a right shift of 48 to keep the reversed low 16 bits.
func rewriteValueARM64_OpBitRev16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitRev16 x)
	// cond:
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 48
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueARM64_OpBitRev32_0 lowers BitRev32 to RBITW.
func rewriteValueARM64_OpBitRev32_0(v *Value) bool {
	// match: (BitRev32 x)
	// cond:
	// result: (RBITW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBITW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpBitRev64_0 lowers BitRev64 to RBIT.
func rewriteValueARM64_OpBitRev64_0(v *Value) bool {
	// match: (BitRev64 x)
	// cond:
	// result: (RBIT x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBIT)
		v.AddArg(x)
		return true
	}
}
32784 for { 32785 x := v.Args[0] 32786 v.reset(OpARM64SRLconst) 32787 v.AuxInt = 56 32788 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 32789 v0.AddArg(x) 32790 v.AddArg(v0) 32791 return true 32792 } 32793 } 32794 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 32795 // match: (Bswap32 x) 32796 // cond: 32797 // result: (REVW x) 32798 for { 32799 x := v.Args[0] 32800 v.reset(OpARM64REVW) 32801 v.AddArg(x) 32802 return true 32803 } 32804 } 32805 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 32806 // match: (Bswap64 x) 32807 // cond: 32808 // result: (REV x) 32809 for { 32810 x := v.Args[0] 32811 v.reset(OpARM64REV) 32812 v.AddArg(x) 32813 return true 32814 } 32815 } 32816 func rewriteValueARM64_OpCeil_0(v *Value) bool { 32817 // match: (Ceil x) 32818 // cond: 32819 // result: (FRINTPD x) 32820 for { 32821 x := v.Args[0] 32822 v.reset(OpARM64FRINTPD) 32823 v.AddArg(x) 32824 return true 32825 } 32826 } 32827 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 32828 // match: (ClosureCall [argwid] entry closure mem) 32829 // cond: 32830 // result: (CALLclosure [argwid] entry closure mem) 32831 for { 32832 argwid := v.AuxInt 32833 _ = v.Args[2] 32834 entry := v.Args[0] 32835 closure := v.Args[1] 32836 mem := v.Args[2] 32837 v.reset(OpARM64CALLclosure) 32838 v.AuxInt = argwid 32839 v.AddArg(entry) 32840 v.AddArg(closure) 32841 v.AddArg(mem) 32842 return true 32843 } 32844 } 32845 func rewriteValueARM64_OpCom16_0(v *Value) bool { 32846 // match: (Com16 x) 32847 // cond: 32848 // result: (MVN x) 32849 for { 32850 x := v.Args[0] 32851 v.reset(OpARM64MVN) 32852 v.AddArg(x) 32853 return true 32854 } 32855 } 32856 func rewriteValueARM64_OpCom32_0(v *Value) bool { 32857 // match: (Com32 x) 32858 // cond: 32859 // result: (MVN x) 32860 for { 32861 x := v.Args[0] 32862 v.reset(OpARM64MVN) 32863 v.AddArg(x) 32864 return true 32865 } 32866 } 32867 func rewriteValueARM64_OpCom64_0(v *Value) bool { 32868 // match: (Com64 x) 32869 // cond: 32870 // result: (MVN x) 
32871 for { 32872 x := v.Args[0] 32873 v.reset(OpARM64MVN) 32874 v.AddArg(x) 32875 return true 32876 } 32877 } 32878 func rewriteValueARM64_OpCom8_0(v *Value) bool { 32879 // match: (Com8 x) 32880 // cond: 32881 // result: (MVN x) 32882 for { 32883 x := v.Args[0] 32884 v.reset(OpARM64MVN) 32885 v.AddArg(x) 32886 return true 32887 } 32888 } 32889 func rewriteValueARM64_OpCondSelect_0(v *Value) bool { 32890 b := v.Block 32891 _ = b 32892 // match: (CondSelect x y bool) 32893 // cond: flagArg(bool) != nil 32894 // result: (CSEL {bool.Op} x y flagArg(bool)) 32895 for { 32896 _ = v.Args[2] 32897 x := v.Args[0] 32898 y := v.Args[1] 32899 bool := v.Args[2] 32900 if !(flagArg(bool) != nil) { 32901 break 32902 } 32903 v.reset(OpARM64CSEL) 32904 v.Aux = bool.Op 32905 v.AddArg(x) 32906 v.AddArg(y) 32907 v.AddArg(flagArg(bool)) 32908 return true 32909 } 32910 // match: (CondSelect x y bool) 32911 // cond: flagArg(bool) == nil 32912 // result: (CSEL {OpARM64NotEqual} x y (CMPWconst [0] bool)) 32913 for { 32914 _ = v.Args[2] 32915 x := v.Args[0] 32916 y := v.Args[1] 32917 bool := v.Args[2] 32918 if !(flagArg(bool) == nil) { 32919 break 32920 } 32921 v.reset(OpARM64CSEL) 32922 v.Aux = OpARM64NotEqual 32923 v.AddArg(x) 32924 v.AddArg(y) 32925 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 32926 v0.AuxInt = 0 32927 v0.AddArg(bool) 32928 v.AddArg(v0) 32929 return true 32930 } 32931 return false 32932 } 32933 func rewriteValueARM64_OpConst16_0(v *Value) bool { 32934 // match: (Const16 [val]) 32935 // cond: 32936 // result: (MOVDconst [val]) 32937 for { 32938 val := v.AuxInt 32939 v.reset(OpARM64MOVDconst) 32940 v.AuxInt = val 32941 return true 32942 } 32943 } 32944 func rewriteValueARM64_OpConst32_0(v *Value) bool { 32945 // match: (Const32 [val]) 32946 // cond: 32947 // result: (MOVDconst [val]) 32948 for { 32949 val := v.AuxInt 32950 v.reset(OpARM64MOVDconst) 32951 v.AuxInt = val 32952 return true 32953 } 32954 } 32955 func rewriteValueARM64_OpConst32F_0(v *Value) bool 
{ 32956 // match: (Const32F [val]) 32957 // cond: 32958 // result: (FMOVSconst [val]) 32959 for { 32960 val := v.AuxInt 32961 v.reset(OpARM64FMOVSconst) 32962 v.AuxInt = val 32963 return true 32964 } 32965 } 32966 func rewriteValueARM64_OpConst64_0(v *Value) bool { 32967 // match: (Const64 [val]) 32968 // cond: 32969 // result: (MOVDconst [val]) 32970 for { 32971 val := v.AuxInt 32972 v.reset(OpARM64MOVDconst) 32973 v.AuxInt = val 32974 return true 32975 } 32976 } 32977 func rewriteValueARM64_OpConst64F_0(v *Value) bool { 32978 // match: (Const64F [val]) 32979 // cond: 32980 // result: (FMOVDconst [val]) 32981 for { 32982 val := v.AuxInt 32983 v.reset(OpARM64FMOVDconst) 32984 v.AuxInt = val 32985 return true 32986 } 32987 } 32988 func rewriteValueARM64_OpConst8_0(v *Value) bool { 32989 // match: (Const8 [val]) 32990 // cond: 32991 // result: (MOVDconst [val]) 32992 for { 32993 val := v.AuxInt 32994 v.reset(OpARM64MOVDconst) 32995 v.AuxInt = val 32996 return true 32997 } 32998 } 32999 func rewriteValueARM64_OpConstBool_0(v *Value) bool { 33000 // match: (ConstBool [b]) 33001 // cond: 33002 // result: (MOVDconst [b]) 33003 for { 33004 b := v.AuxInt 33005 v.reset(OpARM64MOVDconst) 33006 v.AuxInt = b 33007 return true 33008 } 33009 } 33010 func rewriteValueARM64_OpConstNil_0(v *Value) bool { 33011 // match: (ConstNil) 33012 // cond: 33013 // result: (MOVDconst [0]) 33014 for { 33015 v.reset(OpARM64MOVDconst) 33016 v.AuxInt = 0 33017 return true 33018 } 33019 } 33020 func rewriteValueARM64_OpCtz32_0(v *Value) bool { 33021 b := v.Block 33022 _ = b 33023 // match: (Ctz32 <t> x) 33024 // cond: 33025 // result: (CLZW (RBITW <t> x)) 33026 for { 33027 t := v.Type 33028 x := v.Args[0] 33029 v.reset(OpARM64CLZW) 33030 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 33031 v0.AddArg(x) 33032 v.AddArg(v0) 33033 return true 33034 } 33035 } 33036 func rewriteValueARM64_OpCtz32NonZero_0(v *Value) bool { 33037 // match: (Ctz32NonZero x) 33038 // cond: 33039 // result: (Ctz32 x) 33040 for { 
33041 x := v.Args[0] 33042 v.reset(OpCtz32) 33043 v.AddArg(x) 33044 return true 33045 } 33046 } 33047 func rewriteValueARM64_OpCtz64_0(v *Value) bool { 33048 b := v.Block 33049 _ = b 33050 // match: (Ctz64 <t> x) 33051 // cond: 33052 // result: (CLZ (RBIT <t> x)) 33053 for { 33054 t := v.Type 33055 x := v.Args[0] 33056 v.reset(OpARM64CLZ) 33057 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 33058 v0.AddArg(x) 33059 v.AddArg(v0) 33060 return true 33061 } 33062 } 33063 func rewriteValueARM64_OpCtz64NonZero_0(v *Value) bool { 33064 // match: (Ctz64NonZero x) 33065 // cond: 33066 // result: (Ctz64 x) 33067 for { 33068 x := v.Args[0] 33069 v.reset(OpCtz64) 33070 v.AddArg(x) 33071 return true 33072 } 33073 } 33074 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool { 33075 // match: (Cvt32Fto32 x) 33076 // cond: 33077 // result: (FCVTZSSW x) 33078 for { 33079 x := v.Args[0] 33080 v.reset(OpARM64FCVTZSSW) 33081 v.AddArg(x) 33082 return true 33083 } 33084 } 33085 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool { 33086 // match: (Cvt32Fto32U x) 33087 // cond: 33088 // result: (FCVTZUSW x) 33089 for { 33090 x := v.Args[0] 33091 v.reset(OpARM64FCVTZUSW) 33092 v.AddArg(x) 33093 return true 33094 } 33095 } 33096 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool { 33097 // match: (Cvt32Fto64 x) 33098 // cond: 33099 // result: (FCVTZSS x) 33100 for { 33101 x := v.Args[0] 33102 v.reset(OpARM64FCVTZSS) 33103 v.AddArg(x) 33104 return true 33105 } 33106 } 33107 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool { 33108 // match: (Cvt32Fto64F x) 33109 // cond: 33110 // result: (FCVTSD x) 33111 for { 33112 x := v.Args[0] 33113 v.reset(OpARM64FCVTSD) 33114 v.AddArg(x) 33115 return true 33116 } 33117 } 33118 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool { 33119 // match: (Cvt32Fto64U x) 33120 // cond: 33121 // result: (FCVTZUS x) 33122 for { 33123 x := v.Args[0] 33124 v.reset(OpARM64FCVTZUS) 33125 v.AddArg(x) 33126 return true 33127 } 33128 } 33129 func 
rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool { 33130 // match: (Cvt32Uto32F x) 33131 // cond: 33132 // result: (UCVTFWS x) 33133 for { 33134 x := v.Args[0] 33135 v.reset(OpARM64UCVTFWS) 33136 v.AddArg(x) 33137 return true 33138 } 33139 } 33140 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool { 33141 // match: (Cvt32Uto64F x) 33142 // cond: 33143 // result: (UCVTFWD x) 33144 for { 33145 x := v.Args[0] 33146 v.reset(OpARM64UCVTFWD) 33147 v.AddArg(x) 33148 return true 33149 } 33150 } 33151 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool { 33152 // match: (Cvt32to32F x) 33153 // cond: 33154 // result: (SCVTFWS x) 33155 for { 33156 x := v.Args[0] 33157 v.reset(OpARM64SCVTFWS) 33158 v.AddArg(x) 33159 return true 33160 } 33161 } 33162 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool { 33163 // match: (Cvt32to64F x) 33164 // cond: 33165 // result: (SCVTFWD x) 33166 for { 33167 x := v.Args[0] 33168 v.reset(OpARM64SCVTFWD) 33169 v.AddArg(x) 33170 return true 33171 } 33172 } 33173 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool { 33174 // match: (Cvt64Fto32 x) 33175 // cond: 33176 // result: (FCVTZSDW x) 33177 for { 33178 x := v.Args[0] 33179 v.reset(OpARM64FCVTZSDW) 33180 v.AddArg(x) 33181 return true 33182 } 33183 } 33184 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool { 33185 // match: (Cvt64Fto32F x) 33186 // cond: 33187 // result: (FCVTDS x) 33188 for { 33189 x := v.Args[0] 33190 v.reset(OpARM64FCVTDS) 33191 v.AddArg(x) 33192 return true 33193 } 33194 } 33195 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool { 33196 // match: (Cvt64Fto32U x) 33197 // cond: 33198 // result: (FCVTZUDW x) 33199 for { 33200 x := v.Args[0] 33201 v.reset(OpARM64FCVTZUDW) 33202 v.AddArg(x) 33203 return true 33204 } 33205 } 33206 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool { 33207 // match: (Cvt64Fto64 x) 33208 // cond: 33209 // result: (FCVTZSD x) 33210 for { 33211 x := v.Args[0] 33212 v.reset(OpARM64FCVTZSD) 33213 v.AddArg(x) 33214 return true 33215 } 
33216 } 33217 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool { 33218 // match: (Cvt64Fto64U x) 33219 // cond: 33220 // result: (FCVTZUD x) 33221 for { 33222 x := v.Args[0] 33223 v.reset(OpARM64FCVTZUD) 33224 v.AddArg(x) 33225 return true 33226 } 33227 } 33228 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool { 33229 // match: (Cvt64Uto32F x) 33230 // cond: 33231 // result: (UCVTFS x) 33232 for { 33233 x := v.Args[0] 33234 v.reset(OpARM64UCVTFS) 33235 v.AddArg(x) 33236 return true 33237 } 33238 } 33239 func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool { 33240 // match: (Cvt64Uto64F x) 33241 // cond: 33242 // result: (UCVTFD x) 33243 for { 33244 x := v.Args[0] 33245 v.reset(OpARM64UCVTFD) 33246 v.AddArg(x) 33247 return true 33248 } 33249 } 33250 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool { 33251 // match: (Cvt64to32F x) 33252 // cond: 33253 // result: (SCVTFS x) 33254 for { 33255 x := v.Args[0] 33256 v.reset(OpARM64SCVTFS) 33257 v.AddArg(x) 33258 return true 33259 } 33260 } 33261 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool { 33262 // match: (Cvt64to64F x) 33263 // cond: 33264 // result: (SCVTFD x) 33265 for { 33266 x := v.Args[0] 33267 v.reset(OpARM64SCVTFD) 33268 v.AddArg(x) 33269 return true 33270 } 33271 } 33272 func rewriteValueARM64_OpDiv16_0(v *Value) bool { 33273 b := v.Block 33274 _ = b 33275 typ := &b.Func.Config.Types 33276 _ = typ 33277 // match: (Div16 x y) 33278 // cond: 33279 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 33280 for { 33281 _ = v.Args[1] 33282 x := v.Args[0] 33283 y := v.Args[1] 33284 v.reset(OpARM64DIVW) 33285 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 33286 v0.AddArg(x) 33287 v.AddArg(v0) 33288 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 33289 v1.AddArg(y) 33290 v.AddArg(v1) 33291 return true 33292 } 33293 } 33294 func rewriteValueARM64_OpDiv16u_0(v *Value) bool { 33295 b := v.Block 33296 _ = b 33297 typ := &b.Func.Config.Types 33298 _ = typ 33299 // match: (Div16u x y) 33300 
// cond: 33301 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 33302 for { 33303 _ = v.Args[1] 33304 x := v.Args[0] 33305 y := v.Args[1] 33306 v.reset(OpARM64UDIVW) 33307 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 33308 v0.AddArg(x) 33309 v.AddArg(v0) 33310 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 33311 v1.AddArg(y) 33312 v.AddArg(v1) 33313 return true 33314 } 33315 } 33316 func rewriteValueARM64_OpDiv32_0(v *Value) bool { 33317 // match: (Div32 x y) 33318 // cond: 33319 // result: (DIVW x y) 33320 for { 33321 _ = v.Args[1] 33322 x := v.Args[0] 33323 y := v.Args[1] 33324 v.reset(OpARM64DIVW) 33325 v.AddArg(x) 33326 v.AddArg(y) 33327 return true 33328 } 33329 } 33330 func rewriteValueARM64_OpDiv32F_0(v *Value) bool { 33331 // match: (Div32F x y) 33332 // cond: 33333 // result: (FDIVS x y) 33334 for { 33335 _ = v.Args[1] 33336 x := v.Args[0] 33337 y := v.Args[1] 33338 v.reset(OpARM64FDIVS) 33339 v.AddArg(x) 33340 v.AddArg(y) 33341 return true 33342 } 33343 } 33344 func rewriteValueARM64_OpDiv32u_0(v *Value) bool { 33345 // match: (Div32u x y) 33346 // cond: 33347 // result: (UDIVW x y) 33348 for { 33349 _ = v.Args[1] 33350 x := v.Args[0] 33351 y := v.Args[1] 33352 v.reset(OpARM64UDIVW) 33353 v.AddArg(x) 33354 v.AddArg(y) 33355 return true 33356 } 33357 } 33358 func rewriteValueARM64_OpDiv64_0(v *Value) bool { 33359 // match: (Div64 x y) 33360 // cond: 33361 // result: (DIV x y) 33362 for { 33363 _ = v.Args[1] 33364 x := v.Args[0] 33365 y := v.Args[1] 33366 v.reset(OpARM64DIV) 33367 v.AddArg(x) 33368 v.AddArg(y) 33369 return true 33370 } 33371 } 33372 func rewriteValueARM64_OpDiv64F_0(v *Value) bool { 33373 // match: (Div64F x y) 33374 // cond: 33375 // result: (FDIVD x y) 33376 for { 33377 _ = v.Args[1] 33378 x := v.Args[0] 33379 y := v.Args[1] 33380 v.reset(OpARM64FDIVD) 33381 v.AddArg(x) 33382 v.AddArg(y) 33383 return true 33384 } 33385 } 33386 func rewriteValueARM64_OpDiv64u_0(v *Value) bool { 33387 // match: (Div64u x y) 33388 // 
cond: 33389 // result: (UDIV x y) 33390 for { 33391 _ = v.Args[1] 33392 x := v.Args[0] 33393 y := v.Args[1] 33394 v.reset(OpARM64UDIV) 33395 v.AddArg(x) 33396 v.AddArg(y) 33397 return true 33398 } 33399 } 33400 func rewriteValueARM64_OpDiv8_0(v *Value) bool { 33401 b := v.Block 33402 _ = b 33403 typ := &b.Func.Config.Types 33404 _ = typ 33405 // match: (Div8 x y) 33406 // cond: 33407 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 33408 for { 33409 _ = v.Args[1] 33410 x := v.Args[0] 33411 y := v.Args[1] 33412 v.reset(OpARM64DIVW) 33413 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 33414 v0.AddArg(x) 33415 v.AddArg(v0) 33416 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 33417 v1.AddArg(y) 33418 v.AddArg(v1) 33419 return true 33420 } 33421 } 33422 func rewriteValueARM64_OpDiv8u_0(v *Value) bool { 33423 b := v.Block 33424 _ = b 33425 typ := &b.Func.Config.Types 33426 _ = typ 33427 // match: (Div8u x y) 33428 // cond: 33429 // result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y)) 33430 for { 33431 _ = v.Args[1] 33432 x := v.Args[0] 33433 y := v.Args[1] 33434 v.reset(OpARM64UDIVW) 33435 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 33436 v0.AddArg(x) 33437 v.AddArg(v0) 33438 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 33439 v1.AddArg(y) 33440 v.AddArg(v1) 33441 return true 33442 } 33443 } 33444 func rewriteValueARM64_OpEq16_0(v *Value) bool { 33445 b := v.Block 33446 _ = b 33447 typ := &b.Func.Config.Types 33448 _ = typ 33449 // match: (Eq16 x y) 33450 // cond: 33451 // result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 33452 for { 33453 _ = v.Args[1] 33454 x := v.Args[0] 33455 y := v.Args[1] 33456 v.reset(OpARM64Equal) 33457 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 33458 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 33459 v1.AddArg(x) 33460 v0.AddArg(v1) 33461 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 33462 v2.AddArg(y) 33463 v0.AddArg(v2) 33464 v.AddArg(v0) 33465 return true 33466 } 33467 } 33468 
// NOTE(review): this file is generated from gen/ARM64.rules ("DO NOT EDIT",
// see file header). Do not hand-modify these functions; change the .rules
// source and regenerate. The recurring shapes below are generator idioms:
//   - `for { ... return true }` is a one-shot match attempt, not a loop: the
//     generator emits `break` on a failed condition and `return true` on a
//     successful rewrite.
//   - `_ = v.Args[n]` hoists a single bounds check for the following Args
//     accesses.
//   - `_ = b` / `_ = typ` keep the declarations legal for rules that end up
//     not using them.
func rewriteValueARM64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		// Comparison lowers to a flags-producing CMPW consumed by Equal.
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		// Sub-word operands are widened before the 32-bit compare; zero-
		// extension suffices for equality.
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		// Booleans are 0/1, so x==y is computed as 1 XOR (x XOR y).
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpFloor_0(v *Value) bool {
	// match: (Floor x)
	// cond:
	// result: (FRINTMD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTMD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		// Signed sub-word compare: sign-extend both operands first.
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		// Unsigned sub-word compare: zero-extend both operands first.
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGetCallerPC_0(v *Value) bool {
	// match: (GetCallerPC)
	// cond:
	// result: (LoweredGetCallerPC)
	for {
		v.reset(OpARM64LoweredGetCallerPC)
		return true
	}
}
func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool {
	// match: (GetCallerSP)
	// cond:
	// result: (LoweredGetCallerSP)
	for {
		v.reset(OpARM64LoweredGetCallerSP)
		return true
	}
}
func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}
func rewriteValueARM64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		// High 32 bits of the signed 64-bit product: widen-multiply,
		// then arithmetic-shift right by 32.
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul64_0(v *Value) bool { 34094 // match: (Hmul64 x y) 34095 // cond: 34096 // result: (MULH x y) 34097 for { 34098 _ = v.Args[1] 34099 x := v.Args[0] 34100 y := v.Args[1] 34101 v.reset(OpARM64MULH) 34102 v.AddArg(x) 34103 v.AddArg(y) 34104 return true 34105 } 34106 } 34107 func rewriteValueARM64_OpHmul64u_0(v *Value) bool { 34108 // match: (Hmul64u x y) 34109 // cond: 34110 // result: (UMULH x y) 34111 for { 34112 _ = v.Args[1] 34113 x := v.Args[0] 34114 y := v.Args[1] 34115 v.reset(OpARM64UMULH) 34116 v.AddArg(x) 34117 v.AddArg(y) 34118 return true 34119 } 34120 } 34121 func rewriteValueARM64_OpInterCall_0(v *Value) bool { 34122 // match: (InterCall [argwid] entry mem) 34123 // cond: 34124 // result: (CALLinter [argwid] entry mem) 34125 for { 34126 argwid := v.AuxInt 34127 _ = v.Args[1] 34128 entry := v.Args[0] 34129 mem := v.Args[1] 34130 v.reset(OpARM64CALLinter) 34131 v.AuxInt = argwid 34132 v.AddArg(entry) 34133 v.AddArg(mem) 34134 return true 34135 } 34136 } 34137 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool { 34138 b := v.Block 34139 _ = b 34140 // match: (IsInBounds idx len) 34141 // cond: 34142 // result: (LessThanU (CMP idx len)) 34143 for { 34144 _ = v.Args[1] 34145 idx := v.Args[0] 34146 len := v.Args[1] 34147 v.reset(OpARM64LessThanU) 34148 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34149 v0.AddArg(idx) 34150 v0.AddArg(len) 34151 v.AddArg(v0) 34152 return true 34153 } 34154 } 34155 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool { 34156 b := v.Block 34157 _ = b 34158 // match: (IsNonNil ptr) 34159 // cond: 34160 // result: (NotEqual (CMPconst [0] ptr)) 34161 for { 34162 ptr := v.Args[0] 34163 v.reset(OpARM64NotEqual) 34164 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34165 v0.AuxInt = 0 34166 v0.AddArg(ptr) 34167 v.AddArg(v0) 34168 return true 34169 } 34170 } 34171 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool { 34172 b := v.Block 34173 _ = b 34174 // match: (IsSliceInBounds idx 
len) 34175 // cond: 34176 // result: (LessEqualU (CMP idx len)) 34177 for { 34178 _ = v.Args[1] 34179 idx := v.Args[0] 34180 len := v.Args[1] 34181 v.reset(OpARM64LessEqualU) 34182 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34183 v0.AddArg(idx) 34184 v0.AddArg(len) 34185 v.AddArg(v0) 34186 return true 34187 } 34188 } 34189 func rewriteValueARM64_OpLeq16_0(v *Value) bool { 34190 b := v.Block 34191 _ = b 34192 typ := &b.Func.Config.Types 34193 _ = typ 34194 // match: (Leq16 x y) 34195 // cond: 34196 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 34197 for { 34198 _ = v.Args[1] 34199 x := v.Args[0] 34200 y := v.Args[1] 34201 v.reset(OpARM64LessEqual) 34202 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34203 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34204 v1.AddArg(x) 34205 v0.AddArg(v1) 34206 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34207 v2.AddArg(y) 34208 v0.AddArg(v2) 34209 v.AddArg(v0) 34210 return true 34211 } 34212 } 34213 func rewriteValueARM64_OpLeq16U_0(v *Value) bool { 34214 b := v.Block 34215 _ = b 34216 typ := &b.Func.Config.Types 34217 _ = typ 34218 // match: (Leq16U x y) 34219 // cond: 34220 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 34221 for { 34222 _ = v.Args[1] 34223 x := v.Args[0] 34224 y := v.Args[1] 34225 v.reset(OpARM64LessEqualU) 34226 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34227 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34228 v1.AddArg(x) 34229 v0.AddArg(v1) 34230 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34231 v2.AddArg(y) 34232 v0.AddArg(v2) 34233 v.AddArg(v0) 34234 return true 34235 } 34236 } 34237 func rewriteValueARM64_OpLeq32_0(v *Value) bool { 34238 b := v.Block 34239 _ = b 34240 // match: (Leq32 x y) 34241 // cond: 34242 // result: (LessEqual (CMPW x y)) 34243 for { 34244 _ = v.Args[1] 34245 x := v.Args[0] 34246 y := v.Args[1] 34247 v.reset(OpARM64LessEqual) 34248 v0 := b.NewValue0(v.Pos, OpARM64CMPW, 
types.TypeFlags) 34249 v0.AddArg(x) 34250 v0.AddArg(y) 34251 v.AddArg(v0) 34252 return true 34253 } 34254 } 34255 func rewriteValueARM64_OpLeq32F_0(v *Value) bool { 34256 b := v.Block 34257 _ = b 34258 // match: (Leq32F x y) 34259 // cond: 34260 // result: (GreaterEqual (FCMPS y x)) 34261 for { 34262 _ = v.Args[1] 34263 x := v.Args[0] 34264 y := v.Args[1] 34265 v.reset(OpARM64GreaterEqual) 34266 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 34267 v0.AddArg(y) 34268 v0.AddArg(x) 34269 v.AddArg(v0) 34270 return true 34271 } 34272 } 34273 func rewriteValueARM64_OpLeq32U_0(v *Value) bool { 34274 b := v.Block 34275 _ = b 34276 // match: (Leq32U x y) 34277 // cond: 34278 // result: (LessEqualU (CMPW x y)) 34279 for { 34280 _ = v.Args[1] 34281 x := v.Args[0] 34282 y := v.Args[1] 34283 v.reset(OpARM64LessEqualU) 34284 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34285 v0.AddArg(x) 34286 v0.AddArg(y) 34287 v.AddArg(v0) 34288 return true 34289 } 34290 } 34291 func rewriteValueARM64_OpLeq64_0(v *Value) bool { 34292 b := v.Block 34293 _ = b 34294 // match: (Leq64 x y) 34295 // cond: 34296 // result: (LessEqual (CMP x y)) 34297 for { 34298 _ = v.Args[1] 34299 x := v.Args[0] 34300 y := v.Args[1] 34301 v.reset(OpARM64LessEqual) 34302 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34303 v0.AddArg(x) 34304 v0.AddArg(y) 34305 v.AddArg(v0) 34306 return true 34307 } 34308 } 34309 func rewriteValueARM64_OpLeq64F_0(v *Value) bool { 34310 b := v.Block 34311 _ = b 34312 // match: (Leq64F x y) 34313 // cond: 34314 // result: (GreaterEqual (FCMPD y x)) 34315 for { 34316 _ = v.Args[1] 34317 x := v.Args[0] 34318 y := v.Args[1] 34319 v.reset(OpARM64GreaterEqual) 34320 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 34321 v0.AddArg(y) 34322 v0.AddArg(x) 34323 v.AddArg(v0) 34324 return true 34325 } 34326 } 34327 func rewriteValueARM64_OpLeq64U_0(v *Value) bool { 34328 b := v.Block 34329 _ = b 34330 // match: (Leq64U x y) 34331 // cond: 34332 // result: 
(LessEqualU (CMP x y)) 34333 for { 34334 _ = v.Args[1] 34335 x := v.Args[0] 34336 y := v.Args[1] 34337 v.reset(OpARM64LessEqualU) 34338 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34339 v0.AddArg(x) 34340 v0.AddArg(y) 34341 v.AddArg(v0) 34342 return true 34343 } 34344 } 34345 func rewriteValueARM64_OpLeq8_0(v *Value) bool { 34346 b := v.Block 34347 _ = b 34348 typ := &b.Func.Config.Types 34349 _ = typ 34350 // match: (Leq8 x y) 34351 // cond: 34352 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 34353 for { 34354 _ = v.Args[1] 34355 x := v.Args[0] 34356 y := v.Args[1] 34357 v.reset(OpARM64LessEqual) 34358 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34359 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34360 v1.AddArg(x) 34361 v0.AddArg(v1) 34362 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34363 v2.AddArg(y) 34364 v0.AddArg(v2) 34365 v.AddArg(v0) 34366 return true 34367 } 34368 } 34369 func rewriteValueARM64_OpLeq8U_0(v *Value) bool { 34370 b := v.Block 34371 _ = b 34372 typ := &b.Func.Config.Types 34373 _ = typ 34374 // match: (Leq8U x y) 34375 // cond: 34376 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 34377 for { 34378 _ = v.Args[1] 34379 x := v.Args[0] 34380 y := v.Args[1] 34381 v.reset(OpARM64LessEqualU) 34382 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34383 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34384 v1.AddArg(x) 34385 v0.AddArg(v1) 34386 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34387 v2.AddArg(y) 34388 v0.AddArg(v2) 34389 v.AddArg(v0) 34390 return true 34391 } 34392 } 34393 func rewriteValueARM64_OpLess16_0(v *Value) bool { 34394 b := v.Block 34395 _ = b 34396 typ := &b.Func.Config.Types 34397 _ = typ 34398 // match: (Less16 x y) 34399 // cond: 34400 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 34401 for { 34402 _ = v.Args[1] 34403 x := v.Args[0] 34404 y := v.Args[1] 34405 v.reset(OpARM64LessThan) 34406 v0 := b.NewValue0(v.Pos, 
OpARM64CMPW, types.TypeFlags) 34407 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34408 v1.AddArg(x) 34409 v0.AddArg(v1) 34410 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34411 v2.AddArg(y) 34412 v0.AddArg(v2) 34413 v.AddArg(v0) 34414 return true 34415 } 34416 } 34417 func rewriteValueARM64_OpLess16U_0(v *Value) bool { 34418 b := v.Block 34419 _ = b 34420 typ := &b.Func.Config.Types 34421 _ = typ 34422 // match: (Less16U x y) 34423 // cond: 34424 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 34425 for { 34426 _ = v.Args[1] 34427 x := v.Args[0] 34428 y := v.Args[1] 34429 v.reset(OpARM64LessThanU) 34430 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34431 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34432 v1.AddArg(x) 34433 v0.AddArg(v1) 34434 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34435 v2.AddArg(y) 34436 v0.AddArg(v2) 34437 v.AddArg(v0) 34438 return true 34439 } 34440 } 34441 func rewriteValueARM64_OpLess32_0(v *Value) bool { 34442 b := v.Block 34443 _ = b 34444 // match: (Less32 x y) 34445 // cond: 34446 // result: (LessThan (CMPW x y)) 34447 for { 34448 _ = v.Args[1] 34449 x := v.Args[0] 34450 y := v.Args[1] 34451 v.reset(OpARM64LessThan) 34452 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34453 v0.AddArg(x) 34454 v0.AddArg(y) 34455 v.AddArg(v0) 34456 return true 34457 } 34458 } 34459 func rewriteValueARM64_OpLess32F_0(v *Value) bool { 34460 b := v.Block 34461 _ = b 34462 // match: (Less32F x y) 34463 // cond: 34464 // result: (GreaterThan (FCMPS y x)) 34465 for { 34466 _ = v.Args[1] 34467 x := v.Args[0] 34468 y := v.Args[1] 34469 v.reset(OpARM64GreaterThan) 34470 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 34471 v0.AddArg(y) 34472 v0.AddArg(x) 34473 v.AddArg(v0) 34474 return true 34475 } 34476 } 34477 func rewriteValueARM64_OpLess32U_0(v *Value) bool { 34478 b := v.Block 34479 _ = b 34480 // match: (Less32U x y) 34481 // cond: 34482 // result: (LessThanU (CMPW x y)) 34483 
for { 34484 _ = v.Args[1] 34485 x := v.Args[0] 34486 y := v.Args[1] 34487 v.reset(OpARM64LessThanU) 34488 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34489 v0.AddArg(x) 34490 v0.AddArg(y) 34491 v.AddArg(v0) 34492 return true 34493 } 34494 } 34495 func rewriteValueARM64_OpLess64_0(v *Value) bool { 34496 b := v.Block 34497 _ = b 34498 // match: (Less64 x y) 34499 // cond: 34500 // result: (LessThan (CMP x y)) 34501 for { 34502 _ = v.Args[1] 34503 x := v.Args[0] 34504 y := v.Args[1] 34505 v.reset(OpARM64LessThan) 34506 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34507 v0.AddArg(x) 34508 v0.AddArg(y) 34509 v.AddArg(v0) 34510 return true 34511 } 34512 } 34513 func rewriteValueARM64_OpLess64F_0(v *Value) bool { 34514 b := v.Block 34515 _ = b 34516 // match: (Less64F x y) 34517 // cond: 34518 // result: (GreaterThan (FCMPD y x)) 34519 for { 34520 _ = v.Args[1] 34521 x := v.Args[0] 34522 y := v.Args[1] 34523 v.reset(OpARM64GreaterThan) 34524 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 34525 v0.AddArg(y) 34526 v0.AddArg(x) 34527 v.AddArg(v0) 34528 return true 34529 } 34530 } 34531 func rewriteValueARM64_OpLess64U_0(v *Value) bool { 34532 b := v.Block 34533 _ = b 34534 // match: (Less64U x y) 34535 // cond: 34536 // result: (LessThanU (CMP x y)) 34537 for { 34538 _ = v.Args[1] 34539 x := v.Args[0] 34540 y := v.Args[1] 34541 v.reset(OpARM64LessThanU) 34542 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34543 v0.AddArg(x) 34544 v0.AddArg(y) 34545 v.AddArg(v0) 34546 return true 34547 } 34548 } 34549 func rewriteValueARM64_OpLess8_0(v *Value) bool { 34550 b := v.Block 34551 _ = b 34552 typ := &b.Func.Config.Types 34553 _ = typ 34554 // match: (Less8 x y) 34555 // cond: 34556 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 34557 for { 34558 _ = v.Args[1] 34559 x := v.Args[0] 34560 y := v.Args[1] 34561 v.reset(OpARM64LessThan) 34562 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34563 v1 := b.NewValue0(v.Pos, 
OpSignExt8to32, typ.Int32) 34564 v1.AddArg(x) 34565 v0.AddArg(v1) 34566 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34567 v2.AddArg(y) 34568 v0.AddArg(v2) 34569 v.AddArg(v0) 34570 return true 34571 } 34572 } 34573 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 34574 b := v.Block 34575 _ = b 34576 typ := &b.Func.Config.Types 34577 _ = typ 34578 // match: (Less8U x y) 34579 // cond: 34580 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 34581 for { 34582 _ = v.Args[1] 34583 x := v.Args[0] 34584 y := v.Args[1] 34585 v.reset(OpARM64LessThanU) 34586 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34587 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34588 v1.AddArg(x) 34589 v0.AddArg(v1) 34590 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34591 v2.AddArg(y) 34592 v0.AddArg(v2) 34593 v.AddArg(v0) 34594 return true 34595 } 34596 } 34597 func rewriteValueARM64_OpLoad_0(v *Value) bool { 34598 // match: (Load <t> ptr mem) 34599 // cond: t.IsBoolean() 34600 // result: (MOVBUload ptr mem) 34601 for { 34602 t := v.Type 34603 _ = v.Args[1] 34604 ptr := v.Args[0] 34605 mem := v.Args[1] 34606 if !(t.IsBoolean()) { 34607 break 34608 } 34609 v.reset(OpARM64MOVBUload) 34610 v.AddArg(ptr) 34611 v.AddArg(mem) 34612 return true 34613 } 34614 // match: (Load <t> ptr mem) 34615 // cond: (is8BitInt(t) && isSigned(t)) 34616 // result: (MOVBload ptr mem) 34617 for { 34618 t := v.Type 34619 _ = v.Args[1] 34620 ptr := v.Args[0] 34621 mem := v.Args[1] 34622 if !(is8BitInt(t) && isSigned(t)) { 34623 break 34624 } 34625 v.reset(OpARM64MOVBload) 34626 v.AddArg(ptr) 34627 v.AddArg(mem) 34628 return true 34629 } 34630 // match: (Load <t> ptr mem) 34631 // cond: (is8BitInt(t) && !isSigned(t)) 34632 // result: (MOVBUload ptr mem) 34633 for { 34634 t := v.Type 34635 _ = v.Args[1] 34636 ptr := v.Args[0] 34637 mem := v.Args[1] 34638 if !(is8BitInt(t) && !isSigned(t)) { 34639 break 34640 } 34641 v.reset(OpARM64MOVBUload) 34642 v.AddArg(ptr) 34643 v.AddArg(mem) 
34644 return true 34645 } 34646 // match: (Load <t> ptr mem) 34647 // cond: (is16BitInt(t) && isSigned(t)) 34648 // result: (MOVHload ptr mem) 34649 for { 34650 t := v.Type 34651 _ = v.Args[1] 34652 ptr := v.Args[0] 34653 mem := v.Args[1] 34654 if !(is16BitInt(t) && isSigned(t)) { 34655 break 34656 } 34657 v.reset(OpARM64MOVHload) 34658 v.AddArg(ptr) 34659 v.AddArg(mem) 34660 return true 34661 } 34662 // match: (Load <t> ptr mem) 34663 // cond: (is16BitInt(t) && !isSigned(t)) 34664 // result: (MOVHUload ptr mem) 34665 for { 34666 t := v.Type 34667 _ = v.Args[1] 34668 ptr := v.Args[0] 34669 mem := v.Args[1] 34670 if !(is16BitInt(t) && !isSigned(t)) { 34671 break 34672 } 34673 v.reset(OpARM64MOVHUload) 34674 v.AddArg(ptr) 34675 v.AddArg(mem) 34676 return true 34677 } 34678 // match: (Load <t> ptr mem) 34679 // cond: (is32BitInt(t) && isSigned(t)) 34680 // result: (MOVWload ptr mem) 34681 for { 34682 t := v.Type 34683 _ = v.Args[1] 34684 ptr := v.Args[0] 34685 mem := v.Args[1] 34686 if !(is32BitInt(t) && isSigned(t)) { 34687 break 34688 } 34689 v.reset(OpARM64MOVWload) 34690 v.AddArg(ptr) 34691 v.AddArg(mem) 34692 return true 34693 } 34694 // match: (Load <t> ptr mem) 34695 // cond: (is32BitInt(t) && !isSigned(t)) 34696 // result: (MOVWUload ptr mem) 34697 for { 34698 t := v.Type 34699 _ = v.Args[1] 34700 ptr := v.Args[0] 34701 mem := v.Args[1] 34702 if !(is32BitInt(t) && !isSigned(t)) { 34703 break 34704 } 34705 v.reset(OpARM64MOVWUload) 34706 v.AddArg(ptr) 34707 v.AddArg(mem) 34708 return true 34709 } 34710 // match: (Load <t> ptr mem) 34711 // cond: (is64BitInt(t) || isPtr(t)) 34712 // result: (MOVDload ptr mem) 34713 for { 34714 t := v.Type 34715 _ = v.Args[1] 34716 ptr := v.Args[0] 34717 mem := v.Args[1] 34718 if !(is64BitInt(t) || isPtr(t)) { 34719 break 34720 } 34721 v.reset(OpARM64MOVDload) 34722 v.AddArg(ptr) 34723 v.AddArg(mem) 34724 return true 34725 } 34726 // match: (Load <t> ptr mem) 34727 // cond: is32BitFloat(t) 34728 // result: (FMOVSload ptr mem) 
34729 for { 34730 t := v.Type 34731 _ = v.Args[1] 34732 ptr := v.Args[0] 34733 mem := v.Args[1] 34734 if !(is32BitFloat(t)) { 34735 break 34736 } 34737 v.reset(OpARM64FMOVSload) 34738 v.AddArg(ptr) 34739 v.AddArg(mem) 34740 return true 34741 } 34742 // match: (Load <t> ptr mem) 34743 // cond: is64BitFloat(t) 34744 // result: (FMOVDload ptr mem) 34745 for { 34746 t := v.Type 34747 _ = v.Args[1] 34748 ptr := v.Args[0] 34749 mem := v.Args[1] 34750 if !(is64BitFloat(t)) { 34751 break 34752 } 34753 v.reset(OpARM64FMOVDload) 34754 v.AddArg(ptr) 34755 v.AddArg(mem) 34756 return true 34757 } 34758 return false 34759 } 34760 func rewriteValueARM64_OpLocalAddr_0(v *Value) bool { 34761 // match: (LocalAddr {sym} base _) 34762 // cond: 34763 // result: (MOVDaddr {sym} base) 34764 for { 34765 sym := v.Aux 34766 _ = v.Args[1] 34767 base := v.Args[0] 34768 v.reset(OpARM64MOVDaddr) 34769 v.Aux = sym 34770 v.AddArg(base) 34771 return true 34772 } 34773 } 34774 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 34775 b := v.Block 34776 _ = b 34777 typ := &b.Func.Config.Types 34778 _ = typ 34779 // match: (Lsh16x16 <t> x y) 34780 // cond: 34781 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 34782 for { 34783 t := v.Type 34784 _ = v.Args[1] 34785 x := v.Args[0] 34786 y := v.Args[1] 34787 v.reset(OpARM64CSEL) 34788 v.Aux = OpARM64LessThanU 34789 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34790 v0.AddArg(x) 34791 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34792 v1.AddArg(y) 34793 v0.AddArg(v1) 34794 v.AddArg(v0) 34795 v2 := b.NewValue0(v.Pos, OpConst64, t) 34796 v2.AuxInt = 0 34797 v.AddArg(v2) 34798 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34799 v3.AuxInt = 64 34800 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34801 v4.AddArg(y) 34802 v3.AddArg(v4) 34803 v.AddArg(v3) 34804 return true 34805 } 34806 } 34807 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 34808 b := v.Block 
34809 _ = b 34810 typ := &b.Func.Config.Types 34811 _ = typ 34812 // match: (Lsh16x32 <t> x y) 34813 // cond: 34814 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 34815 for { 34816 t := v.Type 34817 _ = v.Args[1] 34818 x := v.Args[0] 34819 y := v.Args[1] 34820 v.reset(OpARM64CSEL) 34821 v.Aux = OpARM64LessThanU 34822 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34823 v0.AddArg(x) 34824 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34825 v1.AddArg(y) 34826 v0.AddArg(v1) 34827 v.AddArg(v0) 34828 v2 := b.NewValue0(v.Pos, OpConst64, t) 34829 v2.AuxInt = 0 34830 v.AddArg(v2) 34831 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34832 v3.AuxInt = 64 34833 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34834 v4.AddArg(y) 34835 v3.AddArg(v4) 34836 v.AddArg(v3) 34837 return true 34838 } 34839 } 34840 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 34841 b := v.Block 34842 _ = b 34843 // match: (Lsh16x64 <t> x y) 34844 // cond: 34845 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 34846 for { 34847 t := v.Type 34848 _ = v.Args[1] 34849 x := v.Args[0] 34850 y := v.Args[1] 34851 v.reset(OpARM64CSEL) 34852 v.Aux = OpARM64LessThanU 34853 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34854 v0.AddArg(x) 34855 v0.AddArg(y) 34856 v.AddArg(v0) 34857 v1 := b.NewValue0(v.Pos, OpConst64, t) 34858 v1.AuxInt = 0 34859 v.AddArg(v1) 34860 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34861 v2.AuxInt = 64 34862 v2.AddArg(y) 34863 v.AddArg(v2) 34864 return true 34865 } 34866 } 34867 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 34868 b := v.Block 34869 _ = b 34870 typ := &b.Func.Config.Types 34871 _ = typ 34872 // match: (Lsh16x8 <t> x y) 34873 // cond: 34874 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 34875 for { 34876 t := v.Type 34877 _ = v.Args[1] 34878 x := v.Args[0] 
34879 y := v.Args[1] 34880 v.reset(OpARM64CSEL) 34881 v.Aux = OpARM64LessThanU 34882 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34883 v0.AddArg(x) 34884 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 34885 v1.AddArg(y) 34886 v0.AddArg(v1) 34887 v.AddArg(v0) 34888 v2 := b.NewValue0(v.Pos, OpConst64, t) 34889 v2.AuxInt = 0 34890 v.AddArg(v2) 34891 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34892 v3.AuxInt = 64 34893 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 34894 v4.AddArg(y) 34895 v3.AddArg(v4) 34896 v.AddArg(v3) 34897 return true 34898 } 34899 } 34900 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 34901 b := v.Block 34902 _ = b 34903 typ := &b.Func.Config.Types 34904 _ = typ 34905 // match: (Lsh32x16 <t> x y) 34906 // cond: 34907 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 34908 for { 34909 t := v.Type 34910 _ = v.Args[1] 34911 x := v.Args[0] 34912 y := v.Args[1] 34913 v.reset(OpARM64CSEL) 34914 v.Aux = OpARM64LessThanU 34915 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34916 v0.AddArg(x) 34917 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34918 v1.AddArg(y) 34919 v0.AddArg(v1) 34920 v.AddArg(v0) 34921 v2 := b.NewValue0(v.Pos, OpConst64, t) 34922 v2.AuxInt = 0 34923 v.AddArg(v2) 34924 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34925 v3.AuxInt = 64 34926 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34927 v4.AddArg(y) 34928 v3.AddArg(v4) 34929 v.AddArg(v3) 34930 return true 34931 } 34932 } 34933 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 34934 b := v.Block 34935 _ = b 34936 typ := &b.Func.Config.Types 34937 _ = typ 34938 // match: (Lsh32x32 <t> x y) 34939 // cond: 34940 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 34941 for { 34942 t := v.Type 34943 _ = v.Args[1] 34944 x := v.Args[0] 34945 y := v.Args[1] 34946 v.reset(OpARM64CSEL) 34947 v.Aux = 
OpARM64LessThanU 34948 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34949 v0.AddArg(x) 34950 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34951 v1.AddArg(y) 34952 v0.AddArg(v1) 34953 v.AddArg(v0) 34954 v2 := b.NewValue0(v.Pos, OpConst64, t) 34955 v2.AuxInt = 0 34956 v.AddArg(v2) 34957 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34958 v3.AuxInt = 64 34959 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34960 v4.AddArg(y) 34961 v3.AddArg(v4) 34962 v.AddArg(v3) 34963 return true 34964 } 34965 } 34966 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 34967 b := v.Block 34968 _ = b 34969 // match: (Lsh32x64 <t> x y) 34970 // cond: 34971 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 34972 for { 34973 t := v.Type 34974 _ = v.Args[1] 34975 x := v.Args[0] 34976 y := v.Args[1] 34977 v.reset(OpARM64CSEL) 34978 v.Aux = OpARM64LessThanU 34979 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34980 v0.AddArg(x) 34981 v0.AddArg(y) 34982 v.AddArg(v0) 34983 v1 := b.NewValue0(v.Pos, OpConst64, t) 34984 v1.AuxInt = 0 34985 v.AddArg(v1) 34986 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34987 v2.AuxInt = 64 34988 v2.AddArg(y) 34989 v.AddArg(v2) 34990 return true 34991 } 34992 } 34993 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 34994 b := v.Block 34995 _ = b 34996 typ := &b.Func.Config.Types 34997 _ = typ 34998 // match: (Lsh32x8 <t> x y) 34999 // cond: 35000 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 35001 for { 35002 t := v.Type 35003 _ = v.Args[1] 35004 x := v.Args[0] 35005 y := v.Args[1] 35006 v.reset(OpARM64CSEL) 35007 v.Aux = OpARM64LessThanU 35008 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35009 v0.AddArg(x) 35010 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35011 v1.AddArg(y) 35012 v0.AddArg(v1) 35013 v.AddArg(v0) 35014 v2 := b.NewValue0(v.Pos, OpConst64, t) 35015 v2.AuxInt = 0 35016 v.AddArg(v2) 35017 v3 := 
b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35018 v3.AuxInt = 64 35019 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35020 v4.AddArg(y) 35021 v3.AddArg(v4) 35022 v.AddArg(v3) 35023 return true 35024 } 35025 } 35026 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 35027 b := v.Block 35028 _ = b 35029 typ := &b.Func.Config.Types 35030 _ = typ 35031 // match: (Lsh64x16 <t> x y) 35032 // cond: 35033 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 35034 for { 35035 t := v.Type 35036 _ = v.Args[1] 35037 x := v.Args[0] 35038 y := v.Args[1] 35039 v.reset(OpARM64CSEL) 35040 v.Aux = OpARM64LessThanU 35041 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35042 v0.AddArg(x) 35043 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35044 v1.AddArg(y) 35045 v0.AddArg(v1) 35046 v.AddArg(v0) 35047 v2 := b.NewValue0(v.Pos, OpConst64, t) 35048 v2.AuxInt = 0 35049 v.AddArg(v2) 35050 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35051 v3.AuxInt = 64 35052 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35053 v4.AddArg(y) 35054 v3.AddArg(v4) 35055 v.AddArg(v3) 35056 return true 35057 } 35058 } 35059 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 35060 b := v.Block 35061 _ = b 35062 typ := &b.Func.Config.Types 35063 _ = typ 35064 // match: (Lsh64x32 <t> x y) 35065 // cond: 35066 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 35067 for { 35068 t := v.Type 35069 _ = v.Args[1] 35070 x := v.Args[0] 35071 y := v.Args[1] 35072 v.reset(OpARM64CSEL) 35073 v.Aux = OpARM64LessThanU 35074 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35075 v0.AddArg(x) 35076 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35077 v1.AddArg(y) 35078 v0.AddArg(v1) 35079 v.AddArg(v0) 35080 v2 := b.NewValue0(v.Pos, OpConst64, t) 35081 v2.AuxInt = 0 35082 v.AddArg(v2) 35083 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35084 
v3.AuxInt = 64 35085 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35086 v4.AddArg(y) 35087 v3.AddArg(v4) 35088 v.AddArg(v3) 35089 return true 35090 } 35091 } 35092 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 35093 b := v.Block 35094 _ = b 35095 // match: (Lsh64x64 <t> x y) 35096 // cond: 35097 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 35098 for { 35099 t := v.Type 35100 _ = v.Args[1] 35101 x := v.Args[0] 35102 y := v.Args[1] 35103 v.reset(OpARM64CSEL) 35104 v.Aux = OpARM64LessThanU 35105 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35106 v0.AddArg(x) 35107 v0.AddArg(y) 35108 v.AddArg(v0) 35109 v1 := b.NewValue0(v.Pos, OpConst64, t) 35110 v1.AuxInt = 0 35111 v.AddArg(v1) 35112 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35113 v2.AuxInt = 64 35114 v2.AddArg(y) 35115 v.AddArg(v2) 35116 return true 35117 } 35118 } 35119 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 35120 b := v.Block 35121 _ = b 35122 typ := &b.Func.Config.Types 35123 _ = typ 35124 // match: (Lsh64x8 <t> x y) 35125 // cond: 35126 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 35127 for { 35128 t := v.Type 35129 _ = v.Args[1] 35130 x := v.Args[0] 35131 y := v.Args[1] 35132 v.reset(OpARM64CSEL) 35133 v.Aux = OpARM64LessThanU 35134 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35135 v0.AddArg(x) 35136 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35137 v1.AddArg(y) 35138 v0.AddArg(v1) 35139 v.AddArg(v0) 35140 v2 := b.NewValue0(v.Pos, OpConst64, t) 35141 v2.AuxInt = 0 35142 v.AddArg(v2) 35143 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35144 v3.AuxInt = 64 35145 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35146 v4.AddArg(y) 35147 v3.AddArg(v4) 35148 v.AddArg(v3) 35149 return true 35150 } 35151 } 35152 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 35153 b := v.Block 35154 _ = b 35155 typ := &b.Func.Config.Types 35156 _ = typ 35157 
// match: (Lsh8x16 <t> x y) 35158 // cond: 35159 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 35160 for { 35161 t := v.Type 35162 _ = v.Args[1] 35163 x := v.Args[0] 35164 y := v.Args[1] 35165 v.reset(OpARM64CSEL) 35166 v.Aux = OpARM64LessThanU 35167 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35168 v0.AddArg(x) 35169 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35170 v1.AddArg(y) 35171 v0.AddArg(v1) 35172 v.AddArg(v0) 35173 v2 := b.NewValue0(v.Pos, OpConst64, t) 35174 v2.AuxInt = 0 35175 v.AddArg(v2) 35176 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35177 v3.AuxInt = 64 35178 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35179 v4.AddArg(y) 35180 v3.AddArg(v4) 35181 v.AddArg(v3) 35182 return true 35183 } 35184 } 35185 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 35186 b := v.Block 35187 _ = b 35188 typ := &b.Func.Config.Types 35189 _ = typ 35190 // match: (Lsh8x32 <t> x y) 35191 // cond: 35192 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 35193 for { 35194 t := v.Type 35195 _ = v.Args[1] 35196 x := v.Args[0] 35197 y := v.Args[1] 35198 v.reset(OpARM64CSEL) 35199 v.Aux = OpARM64LessThanU 35200 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35201 v0.AddArg(x) 35202 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35203 v1.AddArg(y) 35204 v0.AddArg(v1) 35205 v.AddArg(v0) 35206 v2 := b.NewValue0(v.Pos, OpConst64, t) 35207 v2.AuxInt = 0 35208 v.AddArg(v2) 35209 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35210 v3.AuxInt = 64 35211 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35212 v4.AddArg(y) 35213 v3.AddArg(v4) 35214 v.AddArg(v3) 35215 return true 35216 } 35217 } 35218 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool { 35219 b := v.Block 35220 _ = b 35221 // match: (Lsh8x64 <t> x y) 35222 // cond: 35223 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> 
[0]) (CMPconst [64] y)) 35224 for { 35225 t := v.Type 35226 _ = v.Args[1] 35227 x := v.Args[0] 35228 y := v.Args[1] 35229 v.reset(OpARM64CSEL) 35230 v.Aux = OpARM64LessThanU 35231 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35232 v0.AddArg(x) 35233 v0.AddArg(y) 35234 v.AddArg(v0) 35235 v1 := b.NewValue0(v.Pos, OpConst64, t) 35236 v1.AuxInt = 0 35237 v.AddArg(v1) 35238 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35239 v2.AuxInt = 64 35240 v2.AddArg(y) 35241 v.AddArg(v2) 35242 return true 35243 } 35244 } 35245 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool { 35246 b := v.Block 35247 _ = b 35248 typ := &b.Func.Config.Types 35249 _ = typ 35250 // match: (Lsh8x8 <t> x y) 35251 // cond: 35252 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 35253 for { 35254 t := v.Type 35255 _ = v.Args[1] 35256 x := v.Args[0] 35257 y := v.Args[1] 35258 v.reset(OpARM64CSEL) 35259 v.Aux = OpARM64LessThanU 35260 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35261 v0.AddArg(x) 35262 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35263 v1.AddArg(y) 35264 v0.AddArg(v1) 35265 v.AddArg(v0) 35266 v2 := b.NewValue0(v.Pos, OpConst64, t) 35267 v2.AuxInt = 0 35268 v.AddArg(v2) 35269 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35270 v3.AuxInt = 64 35271 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35272 v4.AddArg(y) 35273 v3.AddArg(v4) 35274 v.AddArg(v3) 35275 return true 35276 } 35277 } 35278 func rewriteValueARM64_OpMod16_0(v *Value) bool { 35279 b := v.Block 35280 _ = b 35281 typ := &b.Func.Config.Types 35282 _ = typ 35283 // match: (Mod16 x y) 35284 // cond: 35285 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 35286 for { 35287 _ = v.Args[1] 35288 x := v.Args[0] 35289 y := v.Args[1] 35290 v.reset(OpARM64MODW) 35291 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 35292 v0.AddArg(x) 35293 v.AddArg(v0) 35294 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 35295 v1.AddArg(y) 35296 
v.AddArg(v1) 35297 return true 35298 } 35299 } 35300 func rewriteValueARM64_OpMod16u_0(v *Value) bool { 35301 b := v.Block 35302 _ = b 35303 typ := &b.Func.Config.Types 35304 _ = typ 35305 // match: (Mod16u x y) 35306 // cond: 35307 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 35308 for { 35309 _ = v.Args[1] 35310 x := v.Args[0] 35311 y := v.Args[1] 35312 v.reset(OpARM64UMODW) 35313 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 35314 v0.AddArg(x) 35315 v.AddArg(v0) 35316 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 35317 v1.AddArg(y) 35318 v.AddArg(v1) 35319 return true 35320 } 35321 } 35322 func rewriteValueARM64_OpMod32_0(v *Value) bool { 35323 // match: (Mod32 x y) 35324 // cond: 35325 // result: (MODW x y) 35326 for { 35327 _ = v.Args[1] 35328 x := v.Args[0] 35329 y := v.Args[1] 35330 v.reset(OpARM64MODW) 35331 v.AddArg(x) 35332 v.AddArg(y) 35333 return true 35334 } 35335 } 35336 func rewriteValueARM64_OpMod32u_0(v *Value) bool { 35337 // match: (Mod32u x y) 35338 // cond: 35339 // result: (UMODW x y) 35340 for { 35341 _ = v.Args[1] 35342 x := v.Args[0] 35343 y := v.Args[1] 35344 v.reset(OpARM64UMODW) 35345 v.AddArg(x) 35346 v.AddArg(y) 35347 return true 35348 } 35349 } 35350 func rewriteValueARM64_OpMod64_0(v *Value) bool { 35351 // match: (Mod64 x y) 35352 // cond: 35353 // result: (MOD x y) 35354 for { 35355 _ = v.Args[1] 35356 x := v.Args[0] 35357 y := v.Args[1] 35358 v.reset(OpARM64MOD) 35359 v.AddArg(x) 35360 v.AddArg(y) 35361 return true 35362 } 35363 } 35364 func rewriteValueARM64_OpMod64u_0(v *Value) bool { 35365 // match: (Mod64u x y) 35366 // cond: 35367 // result: (UMOD x y) 35368 for { 35369 _ = v.Args[1] 35370 x := v.Args[0] 35371 y := v.Args[1] 35372 v.reset(OpARM64UMOD) 35373 v.AddArg(x) 35374 v.AddArg(y) 35375 return true 35376 } 35377 } 35378 func rewriteValueARM64_OpMod8_0(v *Value) bool { 35379 b := v.Block 35380 _ = b 35381 typ := &b.Func.Config.Types 35382 _ = typ 35383 // match: (Mod8 x y) 35384 // cond: 35385 
// result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 35386 for { 35387 _ = v.Args[1] 35388 x := v.Args[0] 35389 y := v.Args[1] 35390 v.reset(OpARM64MODW) 35391 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 35392 v0.AddArg(x) 35393 v.AddArg(v0) 35394 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 35395 v1.AddArg(y) 35396 v.AddArg(v1) 35397 return true 35398 } 35399 } 35400 func rewriteValueARM64_OpMod8u_0(v *Value) bool { 35401 b := v.Block 35402 _ = b 35403 typ := &b.Func.Config.Types 35404 _ = typ 35405 // match: (Mod8u x y) 35406 // cond: 35407 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 35408 for { 35409 _ = v.Args[1] 35410 x := v.Args[0] 35411 y := v.Args[1] 35412 v.reset(OpARM64UMODW) 35413 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 35414 v0.AddArg(x) 35415 v.AddArg(v0) 35416 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 35417 v1.AddArg(y) 35418 v.AddArg(v1) 35419 return true 35420 } 35421 } 35422 func rewriteValueARM64_OpMove_0(v *Value) bool { 35423 b := v.Block 35424 _ = b 35425 typ := &b.Func.Config.Types 35426 _ = typ 35427 // match: (Move [0] _ _ mem) 35428 // cond: 35429 // result: mem 35430 for { 35431 if v.AuxInt != 0 { 35432 break 35433 } 35434 _ = v.Args[2] 35435 mem := v.Args[2] 35436 v.reset(OpCopy) 35437 v.Type = mem.Type 35438 v.AddArg(mem) 35439 return true 35440 } 35441 // match: (Move [1] dst src mem) 35442 // cond: 35443 // result: (MOVBstore dst (MOVBUload src mem) mem) 35444 for { 35445 if v.AuxInt != 1 { 35446 break 35447 } 35448 _ = v.Args[2] 35449 dst := v.Args[0] 35450 src := v.Args[1] 35451 mem := v.Args[2] 35452 v.reset(OpARM64MOVBstore) 35453 v.AddArg(dst) 35454 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 35455 v0.AddArg(src) 35456 v0.AddArg(mem) 35457 v.AddArg(v0) 35458 v.AddArg(mem) 35459 return true 35460 } 35461 // match: (Move [2] dst src mem) 35462 // cond: 35463 // result: (MOVHstore dst (MOVHUload src mem) mem) 35464 for { 35465 if v.AuxInt != 2 { 35466 break 35467 } 35468 _ = 
v.Args[2] 35469 dst := v.Args[0] 35470 src := v.Args[1] 35471 mem := v.Args[2] 35472 v.reset(OpARM64MOVHstore) 35473 v.AddArg(dst) 35474 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 35475 v0.AddArg(src) 35476 v0.AddArg(mem) 35477 v.AddArg(v0) 35478 v.AddArg(mem) 35479 return true 35480 } 35481 // match: (Move [4] dst src mem) 35482 // cond: 35483 // result: (MOVWstore dst (MOVWUload src mem) mem) 35484 for { 35485 if v.AuxInt != 4 { 35486 break 35487 } 35488 _ = v.Args[2] 35489 dst := v.Args[0] 35490 src := v.Args[1] 35491 mem := v.Args[2] 35492 v.reset(OpARM64MOVWstore) 35493 v.AddArg(dst) 35494 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 35495 v0.AddArg(src) 35496 v0.AddArg(mem) 35497 v.AddArg(v0) 35498 v.AddArg(mem) 35499 return true 35500 } 35501 // match: (Move [8] dst src mem) 35502 // cond: 35503 // result: (MOVDstore dst (MOVDload src mem) mem) 35504 for { 35505 if v.AuxInt != 8 { 35506 break 35507 } 35508 _ = v.Args[2] 35509 dst := v.Args[0] 35510 src := v.Args[1] 35511 mem := v.Args[2] 35512 v.reset(OpARM64MOVDstore) 35513 v.AddArg(dst) 35514 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35515 v0.AddArg(src) 35516 v0.AddArg(mem) 35517 v.AddArg(v0) 35518 v.AddArg(mem) 35519 return true 35520 } 35521 // match: (Move [3] dst src mem) 35522 // cond: 35523 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 35524 for { 35525 if v.AuxInt != 3 { 35526 break 35527 } 35528 _ = v.Args[2] 35529 dst := v.Args[0] 35530 src := v.Args[1] 35531 mem := v.Args[2] 35532 v.reset(OpARM64MOVBstore) 35533 v.AuxInt = 2 35534 v.AddArg(dst) 35535 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 35536 v0.AuxInt = 2 35537 v0.AddArg(src) 35538 v0.AddArg(mem) 35539 v.AddArg(v0) 35540 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 35541 v1.AddArg(dst) 35542 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 35543 v2.AddArg(src) 35544 v2.AddArg(mem) 35545 v1.AddArg(v2) 35546 v1.AddArg(mem) 35547 
v.AddArg(v1) 35548 return true 35549 } 35550 // match: (Move [5] dst src mem) 35551 // cond: 35552 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 35553 for { 35554 if v.AuxInt != 5 { 35555 break 35556 } 35557 _ = v.Args[2] 35558 dst := v.Args[0] 35559 src := v.Args[1] 35560 mem := v.Args[2] 35561 v.reset(OpARM64MOVBstore) 35562 v.AuxInt = 4 35563 v.AddArg(dst) 35564 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 35565 v0.AuxInt = 4 35566 v0.AddArg(src) 35567 v0.AddArg(mem) 35568 v.AddArg(v0) 35569 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 35570 v1.AddArg(dst) 35571 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 35572 v2.AddArg(src) 35573 v2.AddArg(mem) 35574 v1.AddArg(v2) 35575 v1.AddArg(mem) 35576 v.AddArg(v1) 35577 return true 35578 } 35579 // match: (Move [6] dst src mem) 35580 // cond: 35581 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 35582 for { 35583 if v.AuxInt != 6 { 35584 break 35585 } 35586 _ = v.Args[2] 35587 dst := v.Args[0] 35588 src := v.Args[1] 35589 mem := v.Args[2] 35590 v.reset(OpARM64MOVHstore) 35591 v.AuxInt = 4 35592 v.AddArg(dst) 35593 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 35594 v0.AuxInt = 4 35595 v0.AddArg(src) 35596 v0.AddArg(mem) 35597 v.AddArg(v0) 35598 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 35599 v1.AddArg(dst) 35600 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 35601 v2.AddArg(src) 35602 v2.AddArg(mem) 35603 v1.AddArg(v2) 35604 v1.AddArg(mem) 35605 v.AddArg(v1) 35606 return true 35607 } 35608 // match: (Move [7] dst src mem) 35609 // cond: 35610 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 35611 for { 35612 if v.AuxInt != 7 { 35613 break 35614 } 35615 _ = v.Args[2] 35616 dst := v.Args[0] 35617 src := v.Args[1] 35618 mem := v.Args[2] 35619 v.reset(OpARM64MOVBstore) 35620 
v.AuxInt = 6 35621 v.AddArg(dst) 35622 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 35623 v0.AuxInt = 6 35624 v0.AddArg(src) 35625 v0.AddArg(mem) 35626 v.AddArg(v0) 35627 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 35628 v1.AuxInt = 4 35629 v1.AddArg(dst) 35630 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 35631 v2.AuxInt = 4 35632 v2.AddArg(src) 35633 v2.AddArg(mem) 35634 v1.AddArg(v2) 35635 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 35636 v3.AddArg(dst) 35637 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 35638 v4.AddArg(src) 35639 v4.AddArg(mem) 35640 v3.AddArg(v4) 35641 v3.AddArg(mem) 35642 v1.AddArg(v3) 35643 v.AddArg(v1) 35644 return true 35645 } 35646 // match: (Move [12] dst src mem) 35647 // cond: 35648 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 35649 for { 35650 if v.AuxInt != 12 { 35651 break 35652 } 35653 _ = v.Args[2] 35654 dst := v.Args[0] 35655 src := v.Args[1] 35656 mem := v.Args[2] 35657 v.reset(OpARM64MOVWstore) 35658 v.AuxInt = 8 35659 v.AddArg(dst) 35660 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 35661 v0.AuxInt = 8 35662 v0.AddArg(src) 35663 v0.AddArg(mem) 35664 v.AddArg(v0) 35665 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 35666 v1.AddArg(dst) 35667 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35668 v2.AddArg(src) 35669 v2.AddArg(mem) 35670 v1.AddArg(v2) 35671 v1.AddArg(mem) 35672 v.AddArg(v1) 35673 return true 35674 } 35675 return false 35676 } 35677 func rewriteValueARM64_OpMove_10(v *Value) bool { 35678 b := v.Block 35679 _ = b 35680 config := b.Func.Config 35681 _ = config 35682 typ := &b.Func.Config.Types 35683 _ = typ 35684 // match: (Move [16] dst src mem) 35685 // cond: 35686 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 35687 for { 35688 if v.AuxInt != 16 { 35689 break 35690 } 35691 _ = v.Args[2] 35692 dst := v.Args[0] 35693 src := v.Args[1] 35694 
mem := v.Args[2] 35695 v.reset(OpARM64MOVDstore) 35696 v.AuxInt = 8 35697 v.AddArg(dst) 35698 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35699 v0.AuxInt = 8 35700 v0.AddArg(src) 35701 v0.AddArg(mem) 35702 v.AddArg(v0) 35703 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 35704 v1.AddArg(dst) 35705 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35706 v2.AddArg(src) 35707 v2.AddArg(mem) 35708 v1.AddArg(v2) 35709 v1.AddArg(mem) 35710 v.AddArg(v1) 35711 return true 35712 } 35713 // match: (Move [24] dst src mem) 35714 // cond: 35715 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) 35716 for { 35717 if v.AuxInt != 24 { 35718 break 35719 } 35720 _ = v.Args[2] 35721 dst := v.Args[0] 35722 src := v.Args[1] 35723 mem := v.Args[2] 35724 v.reset(OpARM64MOVDstore) 35725 v.AuxInt = 16 35726 v.AddArg(dst) 35727 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35728 v0.AuxInt = 16 35729 v0.AddArg(src) 35730 v0.AddArg(mem) 35731 v.AddArg(v0) 35732 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 35733 v1.AuxInt = 8 35734 v1.AddArg(dst) 35735 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35736 v2.AuxInt = 8 35737 v2.AddArg(src) 35738 v2.AddArg(mem) 35739 v1.AddArg(v2) 35740 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 35741 v3.AddArg(dst) 35742 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35743 v4.AddArg(src) 35744 v4.AddArg(mem) 35745 v3.AddArg(v4) 35746 v3.AddArg(mem) 35747 v1.AddArg(v3) 35748 v.AddArg(v1) 35749 return true 35750 } 35751 // match: (Move [s] dst src mem) 35752 // cond: s%8 != 0 && s > 8 35753 // result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem)) 35754 for { 35755 s := v.AuxInt 35756 _ = v.Args[2] 35757 dst := v.Args[0] 35758 src := v.Args[1] 35759 mem := v.Args[2] 35760 if !(s%8 != 0 && s > 8) { 35761 break 35762 } 35763 v.reset(OpMove) 35764 
v.AuxInt = s % 8 35765 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 35766 v0.AuxInt = s - s%8 35767 v0.AddArg(dst) 35768 v.AddArg(v0) 35769 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 35770 v1.AuxInt = s - s%8 35771 v1.AddArg(src) 35772 v.AddArg(v1) 35773 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem) 35774 v2.AuxInt = s - s%8 35775 v2.AddArg(dst) 35776 v2.AddArg(src) 35777 v2.AddArg(mem) 35778 v.AddArg(v2) 35779 return true 35780 } 35781 // match: (Move [s] dst src mem) 35782 // cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice 35783 // result: (MOVDstore [s-8] dst (MOVDload [s-8] src mem) (DUFFCOPY <types.TypeMem> [8*(64-(s-8)/16)] dst src mem)) 35784 for { 35785 s := v.AuxInt 35786 _ = v.Args[2] 35787 dst := v.Args[0] 35788 src := v.Args[1] 35789 mem := v.Args[2] 35790 if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice) { 35791 break 35792 } 35793 v.reset(OpARM64MOVDstore) 35794 v.AuxInt = s - 8 35795 v.AddArg(dst) 35796 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 35797 v0.AuxInt = s - 8 35798 v0.AddArg(src) 35799 v0.AddArg(mem) 35800 v.AddArg(v0) 35801 v1 := b.NewValue0(v.Pos, OpARM64DUFFCOPY, types.TypeMem) 35802 v1.AuxInt = 8 * (64 - (s-8)/16) 35803 v1.AddArg(dst) 35804 v1.AddArg(src) 35805 v1.AddArg(mem) 35806 v.AddArg(v1) 35807 return true 35808 } 35809 // match: (Move [s] dst src mem) 35810 // cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice 35811 // result: (DUFFCOPY [8 * (64 - s/16)] dst src mem) 35812 for { 35813 s := v.AuxInt 35814 _ = v.Args[2] 35815 dst := v.Args[0] 35816 src := v.Args[1] 35817 mem := v.Args[2] 35818 if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) { 35819 break 35820 } 35821 v.reset(OpARM64DUFFCOPY) 35822 v.AuxInt = 8 * (64 - s/16) 35823 v.AddArg(dst) 35824 v.AddArg(src) 35825 v.AddArg(mem) 35826 return true 35827 } 35828 // match: (Move [s] dst src mem) 35829 // cond: s > 24 && s%8 == 0 35830 // result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem) 
35831 for { 35832 s := v.AuxInt 35833 _ = v.Args[2] 35834 dst := v.Args[0] 35835 src := v.Args[1] 35836 mem := v.Args[2] 35837 if !(s > 24 && s%8 == 0) { 35838 break 35839 } 35840 v.reset(OpARM64LoweredMove) 35841 v.AddArg(dst) 35842 v.AddArg(src) 35843 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 35844 v0.AuxInt = s - 8 35845 v0.AddArg(src) 35846 v.AddArg(v0) 35847 v.AddArg(mem) 35848 return true 35849 } 35850 return false 35851 } 35852 func rewriteValueARM64_OpMul16_0(v *Value) bool { 35853 // match: (Mul16 x y) 35854 // cond: 35855 // result: (MULW x y) 35856 for { 35857 _ = v.Args[1] 35858 x := v.Args[0] 35859 y := v.Args[1] 35860 v.reset(OpARM64MULW) 35861 v.AddArg(x) 35862 v.AddArg(y) 35863 return true 35864 } 35865 } 35866 func rewriteValueARM64_OpMul32_0(v *Value) bool { 35867 // match: (Mul32 x y) 35868 // cond: 35869 // result: (MULW x y) 35870 for { 35871 _ = v.Args[1] 35872 x := v.Args[0] 35873 y := v.Args[1] 35874 v.reset(OpARM64MULW) 35875 v.AddArg(x) 35876 v.AddArg(y) 35877 return true 35878 } 35879 } 35880 func rewriteValueARM64_OpMul32F_0(v *Value) bool { 35881 // match: (Mul32F x y) 35882 // cond: 35883 // result: (FMULS x y) 35884 for { 35885 _ = v.Args[1] 35886 x := v.Args[0] 35887 y := v.Args[1] 35888 v.reset(OpARM64FMULS) 35889 v.AddArg(x) 35890 v.AddArg(y) 35891 return true 35892 } 35893 } 35894 func rewriteValueARM64_OpMul64_0(v *Value) bool { 35895 // match: (Mul64 x y) 35896 // cond: 35897 // result: (MUL x y) 35898 for { 35899 _ = v.Args[1] 35900 x := v.Args[0] 35901 y := v.Args[1] 35902 v.reset(OpARM64MUL) 35903 v.AddArg(x) 35904 v.AddArg(y) 35905 return true 35906 } 35907 } 35908 func rewriteValueARM64_OpMul64F_0(v *Value) bool { 35909 // match: (Mul64F x y) 35910 // cond: 35911 // result: (FMULD x y) 35912 for { 35913 _ = v.Args[1] 35914 x := v.Args[0] 35915 y := v.Args[1] 35916 v.reset(OpARM64FMULD) 35917 v.AddArg(x) 35918 v.AddArg(y) 35919 return true 35920 } 35921 } 35922 func rewriteValueARM64_OpMul64uhilo_0(v *Value) bool { 
35923 // match: (Mul64uhilo x y) 35924 // cond: 35925 // result: (LoweredMuluhilo x y) 35926 for { 35927 _ = v.Args[1] 35928 x := v.Args[0] 35929 y := v.Args[1] 35930 v.reset(OpARM64LoweredMuluhilo) 35931 v.AddArg(x) 35932 v.AddArg(y) 35933 return true 35934 } 35935 } 35936 func rewriteValueARM64_OpMul8_0(v *Value) bool { 35937 // match: (Mul8 x y) 35938 // cond: 35939 // result: (MULW x y) 35940 for { 35941 _ = v.Args[1] 35942 x := v.Args[0] 35943 y := v.Args[1] 35944 v.reset(OpARM64MULW) 35945 v.AddArg(x) 35946 v.AddArg(y) 35947 return true 35948 } 35949 } 35950 func rewriteValueARM64_OpNeg16_0(v *Value) bool { 35951 // match: (Neg16 x) 35952 // cond: 35953 // result: (NEG x) 35954 for { 35955 x := v.Args[0] 35956 v.reset(OpARM64NEG) 35957 v.AddArg(x) 35958 return true 35959 } 35960 } 35961 func rewriteValueARM64_OpNeg32_0(v *Value) bool { 35962 // match: (Neg32 x) 35963 // cond: 35964 // result: (NEG x) 35965 for { 35966 x := v.Args[0] 35967 v.reset(OpARM64NEG) 35968 v.AddArg(x) 35969 return true 35970 } 35971 } 35972 func rewriteValueARM64_OpNeg32F_0(v *Value) bool { 35973 // match: (Neg32F x) 35974 // cond: 35975 // result: (FNEGS x) 35976 for { 35977 x := v.Args[0] 35978 v.reset(OpARM64FNEGS) 35979 v.AddArg(x) 35980 return true 35981 } 35982 } 35983 func rewriteValueARM64_OpNeg64_0(v *Value) bool { 35984 // match: (Neg64 x) 35985 // cond: 35986 // result: (NEG x) 35987 for { 35988 x := v.Args[0] 35989 v.reset(OpARM64NEG) 35990 v.AddArg(x) 35991 return true 35992 } 35993 } 35994 func rewriteValueARM64_OpNeg64F_0(v *Value) bool { 35995 // match: (Neg64F x) 35996 // cond: 35997 // result: (FNEGD x) 35998 for { 35999 x := v.Args[0] 36000 v.reset(OpARM64FNEGD) 36001 v.AddArg(x) 36002 return true 36003 } 36004 } 36005 func rewriteValueARM64_OpNeg8_0(v *Value) bool { 36006 // match: (Neg8 x) 36007 // cond: 36008 // result: (NEG x) 36009 for { 36010 x := v.Args[0] 36011 v.reset(OpARM64NEG) 36012 v.AddArg(x) 36013 return true 36014 } 36015 } 36016 func 
rewriteValueARM64_OpNeq16_0(v *Value) bool { 36017 b := v.Block 36018 _ = b 36019 typ := &b.Func.Config.Types 36020 _ = typ 36021 // match: (Neq16 x y) 36022 // cond: 36023 // result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 36024 for { 36025 _ = v.Args[1] 36026 x := v.Args[0] 36027 y := v.Args[1] 36028 v.reset(OpARM64NotEqual) 36029 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 36030 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 36031 v1.AddArg(x) 36032 v0.AddArg(v1) 36033 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 36034 v2.AddArg(y) 36035 v0.AddArg(v2) 36036 v.AddArg(v0) 36037 return true 36038 } 36039 } 36040 func rewriteValueARM64_OpNeq32_0(v *Value) bool { 36041 b := v.Block 36042 _ = b 36043 // match: (Neq32 x y) 36044 // cond: 36045 // result: (NotEqual (CMPW x y)) 36046 for { 36047 _ = v.Args[1] 36048 x := v.Args[0] 36049 y := v.Args[1] 36050 v.reset(OpARM64NotEqual) 36051 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 36052 v0.AddArg(x) 36053 v0.AddArg(y) 36054 v.AddArg(v0) 36055 return true 36056 } 36057 } 36058 func rewriteValueARM64_OpNeq32F_0(v *Value) bool { 36059 b := v.Block 36060 _ = b 36061 // match: (Neq32F x y) 36062 // cond: 36063 // result: (NotEqual (FCMPS x y)) 36064 for { 36065 _ = v.Args[1] 36066 x := v.Args[0] 36067 y := v.Args[1] 36068 v.reset(OpARM64NotEqual) 36069 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 36070 v0.AddArg(x) 36071 v0.AddArg(y) 36072 v.AddArg(v0) 36073 return true 36074 } 36075 } 36076 func rewriteValueARM64_OpNeq64_0(v *Value) bool { 36077 b := v.Block 36078 _ = b 36079 // match: (Neq64 x y) 36080 // cond: 36081 // result: (NotEqual (CMP x y)) 36082 for { 36083 _ = v.Args[1] 36084 x := v.Args[0] 36085 y := v.Args[1] 36086 v.reset(OpARM64NotEqual) 36087 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 36088 v0.AddArg(x) 36089 v0.AddArg(y) 36090 v.AddArg(v0) 36091 return true 36092 } 36093 } 36094 func rewriteValueARM64_OpNeq64F_0(v *Value) bool { 36095 b 
:= v.Block 36096 _ = b 36097 // match: (Neq64F x y) 36098 // cond: 36099 // result: (NotEqual (FCMPD x y)) 36100 for { 36101 _ = v.Args[1] 36102 x := v.Args[0] 36103 y := v.Args[1] 36104 v.reset(OpARM64NotEqual) 36105 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 36106 v0.AddArg(x) 36107 v0.AddArg(y) 36108 v.AddArg(v0) 36109 return true 36110 } 36111 } 36112 func rewriteValueARM64_OpNeq8_0(v *Value) bool { 36113 b := v.Block 36114 _ = b 36115 typ := &b.Func.Config.Types 36116 _ = typ 36117 // match: (Neq8 x y) 36118 // cond: 36119 // result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 36120 for { 36121 _ = v.Args[1] 36122 x := v.Args[0] 36123 y := v.Args[1] 36124 v.reset(OpARM64NotEqual) 36125 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 36126 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 36127 v1.AddArg(x) 36128 v0.AddArg(v1) 36129 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 36130 v2.AddArg(y) 36131 v0.AddArg(v2) 36132 v.AddArg(v0) 36133 return true 36134 } 36135 } 36136 func rewriteValueARM64_OpNeqB_0(v *Value) bool { 36137 // match: (NeqB x y) 36138 // cond: 36139 // result: (XOR x y) 36140 for { 36141 _ = v.Args[1] 36142 x := v.Args[0] 36143 y := v.Args[1] 36144 v.reset(OpARM64XOR) 36145 v.AddArg(x) 36146 v.AddArg(y) 36147 return true 36148 } 36149 } 36150 func rewriteValueARM64_OpNeqPtr_0(v *Value) bool { 36151 b := v.Block 36152 _ = b 36153 // match: (NeqPtr x y) 36154 // cond: 36155 // result: (NotEqual (CMP x y)) 36156 for { 36157 _ = v.Args[1] 36158 x := v.Args[0] 36159 y := v.Args[1] 36160 v.reset(OpARM64NotEqual) 36161 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 36162 v0.AddArg(x) 36163 v0.AddArg(y) 36164 v.AddArg(v0) 36165 return true 36166 } 36167 } 36168 func rewriteValueARM64_OpNilCheck_0(v *Value) bool { 36169 // match: (NilCheck ptr mem) 36170 // cond: 36171 // result: (LoweredNilCheck ptr mem) 36172 for { 36173 _ = v.Args[1] 36174 ptr := v.Args[0] 36175 mem := v.Args[1] 36176 
v.reset(OpARM64LoweredNilCheck) 36177 v.AddArg(ptr) 36178 v.AddArg(mem) 36179 return true 36180 } 36181 } 36182 func rewriteValueARM64_OpNot_0(v *Value) bool { 36183 b := v.Block 36184 _ = b 36185 typ := &b.Func.Config.Types 36186 _ = typ 36187 // match: (Not x) 36188 // cond: 36189 // result: (XOR (MOVDconst [1]) x) 36190 for { 36191 x := v.Args[0] 36192 v.reset(OpARM64XOR) 36193 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 36194 v0.AuxInt = 1 36195 v.AddArg(v0) 36196 v.AddArg(x) 36197 return true 36198 } 36199 } 36200 func rewriteValueARM64_OpOffPtr_0(v *Value) bool { 36201 // match: (OffPtr [off] ptr:(SP)) 36202 // cond: 36203 // result: (MOVDaddr [off] ptr) 36204 for { 36205 off := v.AuxInt 36206 ptr := v.Args[0] 36207 if ptr.Op != OpSP { 36208 break 36209 } 36210 v.reset(OpARM64MOVDaddr) 36211 v.AuxInt = off 36212 v.AddArg(ptr) 36213 return true 36214 } 36215 // match: (OffPtr [off] ptr) 36216 // cond: 36217 // result: (ADDconst [off] ptr) 36218 for { 36219 off := v.AuxInt 36220 ptr := v.Args[0] 36221 v.reset(OpARM64ADDconst) 36222 v.AuxInt = off 36223 v.AddArg(ptr) 36224 return true 36225 } 36226 } 36227 func rewriteValueARM64_OpOr16_0(v *Value) bool { 36228 // match: (Or16 x y) 36229 // cond: 36230 // result: (OR x y) 36231 for { 36232 _ = v.Args[1] 36233 x := v.Args[0] 36234 y := v.Args[1] 36235 v.reset(OpARM64OR) 36236 v.AddArg(x) 36237 v.AddArg(y) 36238 return true 36239 } 36240 } 36241 func rewriteValueARM64_OpOr32_0(v *Value) bool { 36242 // match: (Or32 x y) 36243 // cond: 36244 // result: (OR x y) 36245 for { 36246 _ = v.Args[1] 36247 x := v.Args[0] 36248 y := v.Args[1] 36249 v.reset(OpARM64OR) 36250 v.AddArg(x) 36251 v.AddArg(y) 36252 return true 36253 } 36254 } 36255 func rewriteValueARM64_OpOr64_0(v *Value) bool { 36256 // match: (Or64 x y) 36257 // cond: 36258 // result: (OR x y) 36259 for { 36260 _ = v.Args[1] 36261 x := v.Args[0] 36262 y := v.Args[1] 36263 v.reset(OpARM64OR) 36264 v.AddArg(x) 36265 v.AddArg(y) 36266 return true 36267 } 
36268 } 36269 func rewriteValueARM64_OpOr8_0(v *Value) bool { 36270 // match: (Or8 x y) 36271 // cond: 36272 // result: (OR x y) 36273 for { 36274 _ = v.Args[1] 36275 x := v.Args[0] 36276 y := v.Args[1] 36277 v.reset(OpARM64OR) 36278 v.AddArg(x) 36279 v.AddArg(y) 36280 return true 36281 } 36282 } 36283 func rewriteValueARM64_OpOrB_0(v *Value) bool { 36284 // match: (OrB x y) 36285 // cond: 36286 // result: (OR x y) 36287 for { 36288 _ = v.Args[1] 36289 x := v.Args[0] 36290 y := v.Args[1] 36291 v.reset(OpARM64OR) 36292 v.AddArg(x) 36293 v.AddArg(y) 36294 return true 36295 } 36296 } 36297 func rewriteValueARM64_OpPopCount16_0(v *Value) bool { 36298 b := v.Block 36299 _ = b 36300 typ := &b.Func.Config.Types 36301 _ = typ 36302 // match: (PopCount16 <t> x) 36303 // cond: 36304 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x))))) 36305 for { 36306 t := v.Type 36307 x := v.Args[0] 36308 v.reset(OpARM64FMOVDfpgp) 36309 v.Type = t 36310 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 36311 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 36312 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 36313 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36314 v3.AddArg(x) 36315 v2.AddArg(v3) 36316 v1.AddArg(v2) 36317 v0.AddArg(v1) 36318 v.AddArg(v0) 36319 return true 36320 } 36321 } 36322 func rewriteValueARM64_OpPopCount32_0(v *Value) bool { 36323 b := v.Block 36324 _ = b 36325 typ := &b.Func.Config.Types 36326 _ = typ 36327 // match: (PopCount32 <t> x) 36328 // cond: 36329 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x))))) 36330 for { 36331 t := v.Type 36332 x := v.Args[0] 36333 v.reset(OpARM64FMOVDfpgp) 36334 v.Type = t 36335 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 36336 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 36337 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 36338 v3 := b.NewValue0(v.Pos, 
OpZeroExt32to64, typ.UInt64) 36339 v3.AddArg(x) 36340 v2.AddArg(v3) 36341 v1.AddArg(v2) 36342 v0.AddArg(v1) 36343 v.AddArg(v0) 36344 return true 36345 } 36346 } 36347 func rewriteValueARM64_OpPopCount64_0(v *Value) bool { 36348 b := v.Block 36349 _ = b 36350 typ := &b.Func.Config.Types 36351 _ = typ 36352 // match: (PopCount64 <t> x) 36353 // cond: 36354 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x)))) 36355 for { 36356 t := v.Type 36357 x := v.Args[0] 36358 v.reset(OpARM64FMOVDfpgp) 36359 v.Type = t 36360 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 36361 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 36362 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 36363 v2.AddArg(x) 36364 v1.AddArg(v2) 36365 v0.AddArg(v1) 36366 v.AddArg(v0) 36367 return true 36368 } 36369 } 36370 func rewriteValueARM64_OpRotateLeft32_0(v *Value) bool { 36371 b := v.Block 36372 _ = b 36373 // match: (RotateLeft32 x y) 36374 // cond: 36375 // result: (RORW x (NEG <y.Type> y)) 36376 for { 36377 _ = v.Args[1] 36378 x := v.Args[0] 36379 y := v.Args[1] 36380 v.reset(OpARM64RORW) 36381 v.AddArg(x) 36382 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 36383 v0.AddArg(y) 36384 v.AddArg(v0) 36385 return true 36386 } 36387 } 36388 func rewriteValueARM64_OpRotateLeft64_0(v *Value) bool { 36389 b := v.Block 36390 _ = b 36391 // match: (RotateLeft64 x y) 36392 // cond: 36393 // result: (ROR x (NEG <y.Type> y)) 36394 for { 36395 _ = v.Args[1] 36396 x := v.Args[0] 36397 y := v.Args[1] 36398 v.reset(OpARM64ROR) 36399 v.AddArg(x) 36400 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 36401 v0.AddArg(y) 36402 v.AddArg(v0) 36403 return true 36404 } 36405 } 36406 func rewriteValueARM64_OpRound_0(v *Value) bool { 36407 // match: (Round x) 36408 // cond: 36409 // result: (FRINTAD x) 36410 for { 36411 x := v.Args[0] 36412 v.reset(OpARM64FRINTAD) 36413 v.AddArg(x) 36414 return true 36415 } 36416 } 36417 func rewriteValueARM64_OpRound32F_0(v *Value) 
bool { 36418 // match: (Round32F x) 36419 // cond: 36420 // result: (LoweredRound32F x) 36421 for { 36422 x := v.Args[0] 36423 v.reset(OpARM64LoweredRound32F) 36424 v.AddArg(x) 36425 return true 36426 } 36427 } 36428 func rewriteValueARM64_OpRound64F_0(v *Value) bool { 36429 // match: (Round64F x) 36430 // cond: 36431 // result: (LoweredRound64F x) 36432 for { 36433 x := v.Args[0] 36434 v.reset(OpARM64LoweredRound64F) 36435 v.AddArg(x) 36436 return true 36437 } 36438 } 36439 func rewriteValueARM64_OpRoundToEven_0(v *Value) bool { 36440 // match: (RoundToEven x) 36441 // cond: 36442 // result: (FRINTND x) 36443 for { 36444 x := v.Args[0] 36445 v.reset(OpARM64FRINTND) 36446 v.AddArg(x) 36447 return true 36448 } 36449 } 36450 func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool { 36451 b := v.Block 36452 _ = b 36453 typ := &b.Func.Config.Types 36454 _ = typ 36455 // match: (Rsh16Ux16 <t> x y) 36456 // cond: 36457 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 36458 for { 36459 t := v.Type 36460 _ = v.Args[1] 36461 x := v.Args[0] 36462 y := v.Args[1] 36463 v.reset(OpARM64CSEL) 36464 v.Aux = OpARM64LessThanU 36465 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36466 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36467 v1.AddArg(x) 36468 v0.AddArg(v1) 36469 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36470 v2.AddArg(y) 36471 v0.AddArg(v2) 36472 v.AddArg(v0) 36473 v3 := b.NewValue0(v.Pos, OpConst64, t) 36474 v3.AuxInt = 0 36475 v.AddArg(v3) 36476 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36477 v4.AuxInt = 64 36478 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36479 v5.AddArg(y) 36480 v4.AddArg(v5) 36481 v.AddArg(v4) 36482 return true 36483 } 36484 } 36485 func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool { 36486 b := v.Block 36487 _ = b 36488 typ := &b.Func.Config.Types 36489 _ = typ 36490 // match: (Rsh16Ux32 <t> x y) 36491 // cond: 36492 // result: 
(CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 36493 for { 36494 t := v.Type 36495 _ = v.Args[1] 36496 x := v.Args[0] 36497 y := v.Args[1] 36498 v.reset(OpARM64CSEL) 36499 v.Aux = OpARM64LessThanU 36500 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36501 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36502 v1.AddArg(x) 36503 v0.AddArg(v1) 36504 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36505 v2.AddArg(y) 36506 v0.AddArg(v2) 36507 v.AddArg(v0) 36508 v3 := b.NewValue0(v.Pos, OpConst64, t) 36509 v3.AuxInt = 0 36510 v.AddArg(v3) 36511 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36512 v4.AuxInt = 64 36513 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36514 v5.AddArg(y) 36515 v4.AddArg(v5) 36516 v.AddArg(v4) 36517 return true 36518 } 36519 } 36520 func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool { 36521 b := v.Block 36522 _ = b 36523 typ := &b.Func.Config.Types 36524 _ = typ 36525 // match: (Rsh16Ux64 <t> x y) 36526 // cond: 36527 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 36528 for { 36529 t := v.Type 36530 _ = v.Args[1] 36531 x := v.Args[0] 36532 y := v.Args[1] 36533 v.reset(OpARM64CSEL) 36534 v.Aux = OpARM64LessThanU 36535 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36536 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36537 v1.AddArg(x) 36538 v0.AddArg(v1) 36539 v0.AddArg(y) 36540 v.AddArg(v0) 36541 v2 := b.NewValue0(v.Pos, OpConst64, t) 36542 v2.AuxInt = 0 36543 v.AddArg(v2) 36544 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36545 v3.AuxInt = 64 36546 v3.AddArg(y) 36547 v.AddArg(v3) 36548 return true 36549 } 36550 } 36551 func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool { 36552 b := v.Block 36553 _ = b 36554 typ := &b.Func.Config.Types 36555 _ = typ 36556 // match: (Rsh16Ux8 <t> x y) 36557 // cond: 36558 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) 
(ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 36559 for { 36560 t := v.Type 36561 _ = v.Args[1] 36562 x := v.Args[0] 36563 y := v.Args[1] 36564 v.reset(OpARM64CSEL) 36565 v.Aux = OpARM64LessThanU 36566 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36567 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36568 v1.AddArg(x) 36569 v0.AddArg(v1) 36570 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36571 v2.AddArg(y) 36572 v0.AddArg(v2) 36573 v.AddArg(v0) 36574 v3 := b.NewValue0(v.Pos, OpConst64, t) 36575 v3.AuxInt = 0 36576 v.AddArg(v3) 36577 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36578 v4.AuxInt = 64 36579 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36580 v5.AddArg(y) 36581 v4.AddArg(v5) 36582 v.AddArg(v4) 36583 return true 36584 } 36585 } 36586 func rewriteValueARM64_OpRsh16x16_0(v *Value) bool { 36587 b := v.Block 36588 _ = b 36589 typ := &b.Func.Config.Types 36590 _ = typ 36591 // match: (Rsh16x16 x y) 36592 // cond: 36593 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 36594 for { 36595 _ = v.Args[1] 36596 x := v.Args[0] 36597 y := v.Args[1] 36598 v.reset(OpARM64SRA) 36599 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 36600 v0.AddArg(x) 36601 v.AddArg(v0) 36602 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36603 v1.Aux = OpARM64LessThanU 36604 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36605 v2.AddArg(y) 36606 v1.AddArg(v2) 36607 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36608 v3.AuxInt = 63 36609 v1.AddArg(v3) 36610 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36611 v4.AuxInt = 64 36612 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36613 v5.AddArg(y) 36614 v4.AddArg(v5) 36615 v1.AddArg(v4) 36616 v.AddArg(v1) 36617 return true 36618 } 36619 } 36620 func rewriteValueARM64_OpRsh16x32_0(v *Value) bool { 36621 b := v.Block 36622 _ = b 36623 typ := &b.Func.Config.Types 36624 _ 
= typ 36625 // match: (Rsh16x32 x y) 36626 // cond: 36627 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 36628 for { 36629 _ = v.Args[1] 36630 x := v.Args[0] 36631 y := v.Args[1] 36632 v.reset(OpARM64SRA) 36633 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 36634 v0.AddArg(x) 36635 v.AddArg(v0) 36636 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36637 v1.Aux = OpARM64LessThanU 36638 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36639 v2.AddArg(y) 36640 v1.AddArg(v2) 36641 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36642 v3.AuxInt = 63 36643 v1.AddArg(v3) 36644 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36645 v4.AuxInt = 64 36646 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36647 v5.AddArg(y) 36648 v4.AddArg(v5) 36649 v1.AddArg(v4) 36650 v.AddArg(v1) 36651 return true 36652 } 36653 } 36654 func rewriteValueARM64_OpRsh16x64_0(v *Value) bool { 36655 b := v.Block 36656 _ = b 36657 typ := &b.Func.Config.Types 36658 _ = typ 36659 // match: (Rsh16x64 x y) 36660 // cond: 36661 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 36662 for { 36663 _ = v.Args[1] 36664 x := v.Args[0] 36665 y := v.Args[1] 36666 v.reset(OpARM64SRA) 36667 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 36668 v0.AddArg(x) 36669 v.AddArg(v0) 36670 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36671 v1.Aux = OpARM64LessThanU 36672 v1.AddArg(y) 36673 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 36674 v2.AuxInt = 63 36675 v1.AddArg(v2) 36676 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36677 v3.AuxInt = 64 36678 v3.AddArg(y) 36679 v1.AddArg(v3) 36680 v.AddArg(v1) 36681 return true 36682 } 36683 } 36684 func rewriteValueARM64_OpRsh16x8_0(v *Value) bool { 36685 b := v.Block 36686 _ = b 36687 typ := &b.Func.Config.Types 36688 _ = typ 36689 // match: (Rsh16x8 x y) 36690 // cond: 36691 // 
result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 36692 for { 36693 _ = v.Args[1] 36694 x := v.Args[0] 36695 y := v.Args[1] 36696 v.reset(OpARM64SRA) 36697 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 36698 v0.AddArg(x) 36699 v.AddArg(v0) 36700 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36701 v1.Aux = OpARM64LessThanU 36702 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36703 v2.AddArg(y) 36704 v1.AddArg(v2) 36705 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36706 v3.AuxInt = 63 36707 v1.AddArg(v3) 36708 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36709 v4.AuxInt = 64 36710 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36711 v5.AddArg(y) 36712 v4.AddArg(v5) 36713 v1.AddArg(v4) 36714 v.AddArg(v1) 36715 return true 36716 } 36717 } 36718 func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool { 36719 b := v.Block 36720 _ = b 36721 typ := &b.Func.Config.Types 36722 _ = typ 36723 // match: (Rsh32Ux16 <t> x y) 36724 // cond: 36725 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 36726 for { 36727 t := v.Type 36728 _ = v.Args[1] 36729 x := v.Args[0] 36730 y := v.Args[1] 36731 v.reset(OpARM64CSEL) 36732 v.Aux = OpARM64LessThanU 36733 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36734 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36735 v1.AddArg(x) 36736 v0.AddArg(v1) 36737 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36738 v2.AddArg(y) 36739 v0.AddArg(v2) 36740 v.AddArg(v0) 36741 v3 := b.NewValue0(v.Pos, OpConst64, t) 36742 v3.AuxInt = 0 36743 v.AddArg(v3) 36744 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36745 v4.AuxInt = 64 36746 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36747 v5.AddArg(y) 36748 v4.AddArg(v5) 36749 v.AddArg(v4) 36750 return true 36751 } 36752 } 36753 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool { 36754 
b := v.Block 36755 _ = b 36756 typ := &b.Func.Config.Types 36757 _ = typ 36758 // match: (Rsh32Ux32 <t> x y) 36759 // cond: 36760 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 36761 for { 36762 t := v.Type 36763 _ = v.Args[1] 36764 x := v.Args[0] 36765 y := v.Args[1] 36766 v.reset(OpARM64CSEL) 36767 v.Aux = OpARM64LessThanU 36768 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36769 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36770 v1.AddArg(x) 36771 v0.AddArg(v1) 36772 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36773 v2.AddArg(y) 36774 v0.AddArg(v2) 36775 v.AddArg(v0) 36776 v3 := b.NewValue0(v.Pos, OpConst64, t) 36777 v3.AuxInt = 0 36778 v.AddArg(v3) 36779 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36780 v4.AuxInt = 64 36781 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36782 v5.AddArg(y) 36783 v4.AddArg(v5) 36784 v.AddArg(v4) 36785 return true 36786 } 36787 } 36788 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 36789 b := v.Block 36790 _ = b 36791 typ := &b.Func.Config.Types 36792 _ = typ 36793 // match: (Rsh32Ux64 <t> x y) 36794 // cond: 36795 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 36796 for { 36797 t := v.Type 36798 _ = v.Args[1] 36799 x := v.Args[0] 36800 y := v.Args[1] 36801 v.reset(OpARM64CSEL) 36802 v.Aux = OpARM64LessThanU 36803 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36804 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36805 v1.AddArg(x) 36806 v0.AddArg(v1) 36807 v0.AddArg(y) 36808 v.AddArg(v0) 36809 v2 := b.NewValue0(v.Pos, OpConst64, t) 36810 v2.AuxInt = 0 36811 v.AddArg(v2) 36812 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36813 v3.AuxInt = 64 36814 v3.AddArg(y) 36815 v.AddArg(v3) 36816 return true 36817 } 36818 } 36819 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 36820 b := v.Block 36821 _ = b 36822 typ := &b.Func.Config.Types 
36823 _ = typ 36824 // match: (Rsh32Ux8 <t> x y) 36825 // cond: 36826 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 36827 for { 36828 t := v.Type 36829 _ = v.Args[1] 36830 x := v.Args[0] 36831 y := v.Args[1] 36832 v.reset(OpARM64CSEL) 36833 v.Aux = OpARM64LessThanU 36834 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36835 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36836 v1.AddArg(x) 36837 v0.AddArg(v1) 36838 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36839 v2.AddArg(y) 36840 v0.AddArg(v2) 36841 v.AddArg(v0) 36842 v3 := b.NewValue0(v.Pos, OpConst64, t) 36843 v3.AuxInt = 0 36844 v.AddArg(v3) 36845 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36846 v4.AuxInt = 64 36847 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36848 v5.AddArg(y) 36849 v4.AddArg(v5) 36850 v.AddArg(v4) 36851 return true 36852 } 36853 } 36854 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 36855 b := v.Block 36856 _ = b 36857 typ := &b.Func.Config.Types 36858 _ = typ 36859 // match: (Rsh32x16 x y) 36860 // cond: 36861 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 36862 for { 36863 _ = v.Args[1] 36864 x := v.Args[0] 36865 y := v.Args[1] 36866 v.reset(OpARM64SRA) 36867 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36868 v0.AddArg(x) 36869 v.AddArg(v0) 36870 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36871 v1.Aux = OpARM64LessThanU 36872 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36873 v2.AddArg(y) 36874 v1.AddArg(v2) 36875 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36876 v3.AuxInt = 63 36877 v1.AddArg(v3) 36878 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36879 v4.AuxInt = 64 36880 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36881 v5.AddArg(y) 36882 v4.AddArg(v5) 36883 v1.AddArg(v4) 36884 v.AddArg(v1) 36885 return true 36886 } 36887 
} 36888 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 36889 b := v.Block 36890 _ = b 36891 typ := &b.Func.Config.Types 36892 _ = typ 36893 // match: (Rsh32x32 x y) 36894 // cond: 36895 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 36896 for { 36897 _ = v.Args[1] 36898 x := v.Args[0] 36899 y := v.Args[1] 36900 v.reset(OpARM64SRA) 36901 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36902 v0.AddArg(x) 36903 v.AddArg(v0) 36904 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36905 v1.Aux = OpARM64LessThanU 36906 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36907 v2.AddArg(y) 36908 v1.AddArg(v2) 36909 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36910 v3.AuxInt = 63 36911 v1.AddArg(v3) 36912 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36913 v4.AuxInt = 64 36914 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36915 v5.AddArg(y) 36916 v4.AddArg(v5) 36917 v1.AddArg(v4) 36918 v.AddArg(v1) 36919 return true 36920 } 36921 } 36922 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 36923 b := v.Block 36924 _ = b 36925 typ := &b.Func.Config.Types 36926 _ = typ 36927 // match: (Rsh32x64 x y) 36928 // cond: 36929 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 36930 for { 36931 _ = v.Args[1] 36932 x := v.Args[0] 36933 y := v.Args[1] 36934 v.reset(OpARM64SRA) 36935 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36936 v0.AddArg(x) 36937 v.AddArg(v0) 36938 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36939 v1.Aux = OpARM64LessThanU 36940 v1.AddArg(y) 36941 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 36942 v2.AuxInt = 63 36943 v1.AddArg(v2) 36944 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36945 v3.AuxInt = 64 36946 v3.AddArg(y) 36947 v1.AddArg(v3) 36948 v.AddArg(v1) 36949 return true 36950 } 36951 } 36952 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 
36953 b := v.Block 36954 _ = b 36955 typ := &b.Func.Config.Types 36956 _ = typ 36957 // match: (Rsh32x8 x y) 36958 // cond: 36959 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 36960 for { 36961 _ = v.Args[1] 36962 x := v.Args[0] 36963 y := v.Args[1] 36964 v.reset(OpARM64SRA) 36965 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36966 v0.AddArg(x) 36967 v.AddArg(v0) 36968 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36969 v1.Aux = OpARM64LessThanU 36970 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36971 v2.AddArg(y) 36972 v1.AddArg(v2) 36973 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36974 v3.AuxInt = 63 36975 v1.AddArg(v3) 36976 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36977 v4.AuxInt = 64 36978 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36979 v5.AddArg(y) 36980 v4.AddArg(v5) 36981 v1.AddArg(v4) 36982 v.AddArg(v1) 36983 return true 36984 } 36985 } 36986 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 36987 b := v.Block 36988 _ = b 36989 typ := &b.Func.Config.Types 36990 _ = typ 36991 // match: (Rsh64Ux16 <t> x y) 36992 // cond: 36993 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 36994 for { 36995 t := v.Type 36996 _ = v.Args[1] 36997 x := v.Args[0] 36998 y := v.Args[1] 36999 v.reset(OpARM64CSEL) 37000 v.Aux = OpARM64LessThanU 37001 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37002 v0.AddArg(x) 37003 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37004 v1.AddArg(y) 37005 v0.AddArg(v1) 37006 v.AddArg(v0) 37007 v2 := b.NewValue0(v.Pos, OpConst64, t) 37008 v2.AuxInt = 0 37009 v.AddArg(v2) 37010 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37011 v3.AuxInt = 64 37012 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37013 v4.AddArg(y) 37014 v3.AddArg(v4) 37015 v.AddArg(v3) 37016 return true 37017 } 37018 } 37019 func 
rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 37020 b := v.Block 37021 _ = b 37022 typ := &b.Func.Config.Types 37023 _ = typ 37024 // match: (Rsh64Ux32 <t> x y) 37025 // cond: 37026 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 37027 for { 37028 t := v.Type 37029 _ = v.Args[1] 37030 x := v.Args[0] 37031 y := v.Args[1] 37032 v.reset(OpARM64CSEL) 37033 v.Aux = OpARM64LessThanU 37034 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37035 v0.AddArg(x) 37036 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37037 v1.AddArg(y) 37038 v0.AddArg(v1) 37039 v.AddArg(v0) 37040 v2 := b.NewValue0(v.Pos, OpConst64, t) 37041 v2.AuxInt = 0 37042 v.AddArg(v2) 37043 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37044 v3.AuxInt = 64 37045 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37046 v4.AddArg(y) 37047 v3.AddArg(v4) 37048 v.AddArg(v3) 37049 return true 37050 } 37051 } 37052 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 37053 b := v.Block 37054 _ = b 37055 // match: (Rsh64Ux64 <t> x y) 37056 // cond: 37057 // result: (CSEL {OpARM64LessThanU} (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 37058 for { 37059 t := v.Type 37060 _ = v.Args[1] 37061 x := v.Args[0] 37062 y := v.Args[1] 37063 v.reset(OpARM64CSEL) 37064 v.Aux = OpARM64LessThanU 37065 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37066 v0.AddArg(x) 37067 v0.AddArg(y) 37068 v.AddArg(v0) 37069 v1 := b.NewValue0(v.Pos, OpConst64, t) 37070 v1.AuxInt = 0 37071 v.AddArg(v1) 37072 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37073 v2.AuxInt = 64 37074 v2.AddArg(y) 37075 v.AddArg(v2) 37076 return true 37077 } 37078 } 37079 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool { 37080 b := v.Block 37081 _ = b 37082 typ := &b.Func.Config.Types 37083 _ = typ 37084 // match: (Rsh64Ux8 <t> x y) 37085 // cond: 37086 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 
37087 for { 37088 t := v.Type 37089 _ = v.Args[1] 37090 x := v.Args[0] 37091 y := v.Args[1] 37092 v.reset(OpARM64CSEL) 37093 v.Aux = OpARM64LessThanU 37094 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37095 v0.AddArg(x) 37096 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37097 v1.AddArg(y) 37098 v0.AddArg(v1) 37099 v.AddArg(v0) 37100 v2 := b.NewValue0(v.Pos, OpConst64, t) 37101 v2.AuxInt = 0 37102 v.AddArg(v2) 37103 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37104 v3.AuxInt = 64 37105 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37106 v4.AddArg(y) 37107 v3.AddArg(v4) 37108 v.AddArg(v3) 37109 return true 37110 } 37111 } 37112 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 37113 b := v.Block 37114 _ = b 37115 typ := &b.Func.Config.Types 37116 _ = typ 37117 // match: (Rsh64x16 x y) 37118 // cond: 37119 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 37120 for { 37121 _ = v.Args[1] 37122 x := v.Args[0] 37123 y := v.Args[1] 37124 v.reset(OpARM64SRA) 37125 v.AddArg(x) 37126 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37127 v0.Aux = OpARM64LessThanU 37128 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37129 v1.AddArg(y) 37130 v0.AddArg(v1) 37131 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37132 v2.AuxInt = 63 37133 v0.AddArg(v2) 37134 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37135 v3.AuxInt = 64 37136 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37137 v4.AddArg(y) 37138 v3.AddArg(v4) 37139 v0.AddArg(v3) 37140 v.AddArg(v0) 37141 return true 37142 } 37143 } 37144 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 37145 b := v.Block 37146 _ = b 37147 typ := &b.Func.Config.Types 37148 _ = typ 37149 // match: (Rsh64x32 x y) 37150 // cond: 37151 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 37152 for { 37153 _ = v.Args[1] 37154 x := v.Args[0] 
37155 y := v.Args[1] 37156 v.reset(OpARM64SRA) 37157 v.AddArg(x) 37158 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37159 v0.Aux = OpARM64LessThanU 37160 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37161 v1.AddArg(y) 37162 v0.AddArg(v1) 37163 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37164 v2.AuxInt = 63 37165 v0.AddArg(v2) 37166 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37167 v3.AuxInt = 64 37168 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37169 v4.AddArg(y) 37170 v3.AddArg(v4) 37171 v0.AddArg(v3) 37172 v.AddArg(v0) 37173 return true 37174 } 37175 } 37176 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 37177 b := v.Block 37178 _ = b 37179 // match: (Rsh64x64 x y) 37180 // cond: 37181 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 37182 for { 37183 _ = v.Args[1] 37184 x := v.Args[0] 37185 y := v.Args[1] 37186 v.reset(OpARM64SRA) 37187 v.AddArg(x) 37188 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37189 v0.Aux = OpARM64LessThanU 37190 v0.AddArg(y) 37191 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 37192 v1.AuxInt = 63 37193 v0.AddArg(v1) 37194 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37195 v2.AuxInt = 64 37196 v2.AddArg(y) 37197 v0.AddArg(v2) 37198 v.AddArg(v0) 37199 return true 37200 } 37201 } 37202 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 37203 b := v.Block 37204 _ = b 37205 typ := &b.Func.Config.Types 37206 _ = typ 37207 // match: (Rsh64x8 x y) 37208 // cond: 37209 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 37210 for { 37211 _ = v.Args[1] 37212 x := v.Args[0] 37213 y := v.Args[1] 37214 v.reset(OpARM64SRA) 37215 v.AddArg(x) 37216 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37217 v0.Aux = OpARM64LessThanU 37218 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37219 v1.AddArg(y) 37220 v0.AddArg(v1) 37221 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37222 
v2.AuxInt = 63 37223 v0.AddArg(v2) 37224 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37225 v3.AuxInt = 64 37226 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37227 v4.AddArg(y) 37228 v3.AddArg(v4) 37229 v0.AddArg(v3) 37230 v.AddArg(v0) 37231 return true 37232 } 37233 } 37234 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 37235 b := v.Block 37236 _ = b 37237 typ := &b.Func.Config.Types 37238 _ = typ 37239 // match: (Rsh8Ux16 <t> x y) 37240 // cond: 37241 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 37242 for { 37243 t := v.Type 37244 _ = v.Args[1] 37245 x := v.Args[0] 37246 y := v.Args[1] 37247 v.reset(OpARM64CSEL) 37248 v.Aux = OpARM64LessThanU 37249 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37250 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37251 v1.AddArg(x) 37252 v0.AddArg(v1) 37253 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37254 v2.AddArg(y) 37255 v0.AddArg(v2) 37256 v.AddArg(v0) 37257 v3 := b.NewValue0(v.Pos, OpConst64, t) 37258 v3.AuxInt = 0 37259 v.AddArg(v3) 37260 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37261 v4.AuxInt = 64 37262 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37263 v5.AddArg(y) 37264 v4.AddArg(v5) 37265 v.AddArg(v4) 37266 return true 37267 } 37268 } 37269 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 37270 b := v.Block 37271 _ = b 37272 typ := &b.Func.Config.Types 37273 _ = typ 37274 // match: (Rsh8Ux32 <t> x y) 37275 // cond: 37276 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 37277 for { 37278 t := v.Type 37279 _ = v.Args[1] 37280 x := v.Args[0] 37281 y := v.Args[1] 37282 v.reset(OpARM64CSEL) 37283 v.Aux = OpARM64LessThanU 37284 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37285 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37286 v1.AddArg(x) 37287 v0.AddArg(v1) 37288 v2 := 
b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37289 v2.AddArg(y) 37290 v0.AddArg(v2) 37291 v.AddArg(v0) 37292 v3 := b.NewValue0(v.Pos, OpConst64, t) 37293 v3.AuxInt = 0 37294 v.AddArg(v3) 37295 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37296 v4.AuxInt = 64 37297 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37298 v5.AddArg(y) 37299 v4.AddArg(v5) 37300 v.AddArg(v4) 37301 return true 37302 } 37303 } 37304 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 37305 b := v.Block 37306 _ = b 37307 typ := &b.Func.Config.Types 37308 _ = typ 37309 // match: (Rsh8Ux64 <t> x y) 37310 // cond: 37311 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 37312 for { 37313 t := v.Type 37314 _ = v.Args[1] 37315 x := v.Args[0] 37316 y := v.Args[1] 37317 v.reset(OpARM64CSEL) 37318 v.Aux = OpARM64LessThanU 37319 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37320 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37321 v1.AddArg(x) 37322 v0.AddArg(v1) 37323 v0.AddArg(y) 37324 v.AddArg(v0) 37325 v2 := b.NewValue0(v.Pos, OpConst64, t) 37326 v2.AuxInt = 0 37327 v.AddArg(v2) 37328 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37329 v3.AuxInt = 64 37330 v3.AddArg(y) 37331 v.AddArg(v3) 37332 return true 37333 } 37334 } 37335 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 37336 b := v.Block 37337 _ = b 37338 typ := &b.Func.Config.Types 37339 _ = typ 37340 // match: (Rsh8Ux8 <t> x y) 37341 // cond: 37342 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 37343 for { 37344 t := v.Type 37345 _ = v.Args[1] 37346 x := v.Args[0] 37347 y := v.Args[1] 37348 v.reset(OpARM64CSEL) 37349 v.Aux = OpARM64LessThanU 37350 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37351 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37352 v1.AddArg(x) 37353 v0.AddArg(v1) 37354 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37355 v2.AddArg(y) 37356 
v0.AddArg(v2) 37357 v.AddArg(v0) 37358 v3 := b.NewValue0(v.Pos, OpConst64, t) 37359 v3.AuxInt = 0 37360 v.AddArg(v3) 37361 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37362 v4.AuxInt = 64 37363 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37364 v5.AddArg(y) 37365 v4.AddArg(v5) 37366 v.AddArg(v4) 37367 return true 37368 } 37369 } 37370 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 37371 b := v.Block 37372 _ = b 37373 typ := &b.Func.Config.Types 37374 _ = typ 37375 // match: (Rsh8x16 x y) 37376 // cond: 37377 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 37378 for { 37379 _ = v.Args[1] 37380 x := v.Args[0] 37381 y := v.Args[1] 37382 v.reset(OpARM64SRA) 37383 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37384 v0.AddArg(x) 37385 v.AddArg(v0) 37386 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37387 v1.Aux = OpARM64LessThanU 37388 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37389 v2.AddArg(y) 37390 v1.AddArg(v2) 37391 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37392 v3.AuxInt = 63 37393 v1.AddArg(v3) 37394 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37395 v4.AuxInt = 64 37396 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37397 v5.AddArg(y) 37398 v4.AddArg(v5) 37399 v1.AddArg(v4) 37400 v.AddArg(v1) 37401 return true 37402 } 37403 } 37404 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 37405 b := v.Block 37406 _ = b 37407 typ := &b.Func.Config.Types 37408 _ = typ 37409 // match: (Rsh8x32 x y) 37410 // cond: 37411 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 37412 for { 37413 _ = v.Args[1] 37414 x := v.Args[0] 37415 y := v.Args[1] 37416 v.reset(OpARM64SRA) 37417 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37418 v0.AddArg(x) 37419 v.AddArg(v0) 37420 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 
37421 v1.Aux = OpARM64LessThanU 37422 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37423 v2.AddArg(y) 37424 v1.AddArg(v2) 37425 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37426 v3.AuxInt = 63 37427 v1.AddArg(v3) 37428 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37429 v4.AuxInt = 64 37430 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37431 v5.AddArg(y) 37432 v4.AddArg(v5) 37433 v1.AddArg(v4) 37434 v.AddArg(v1) 37435 return true 37436 } 37437 } 37438 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 37439 b := v.Block 37440 _ = b 37441 typ := &b.Func.Config.Types 37442 _ = typ 37443 // match: (Rsh8x64 x y) 37444 // cond: 37445 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 37446 for { 37447 _ = v.Args[1] 37448 x := v.Args[0] 37449 y := v.Args[1] 37450 v.reset(OpARM64SRA) 37451 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37452 v0.AddArg(x) 37453 v.AddArg(v0) 37454 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37455 v1.Aux = OpARM64LessThanU 37456 v1.AddArg(y) 37457 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37458 v2.AuxInt = 63 37459 v1.AddArg(v2) 37460 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37461 v3.AuxInt = 64 37462 v3.AddArg(y) 37463 v1.AddArg(v3) 37464 v.AddArg(v1) 37465 return true 37466 } 37467 } 37468 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 37469 b := v.Block 37470 _ = b 37471 typ := &b.Func.Config.Types 37472 _ = typ 37473 // match: (Rsh8x8 x y) 37474 // cond: 37475 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 37476 for { 37477 _ = v.Args[1] 37478 x := v.Args[0] 37479 y := v.Args[1] 37480 v.reset(OpARM64SRA) 37481 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37482 v0.AddArg(x) 37483 v.AddArg(v0) 37484 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37485 v1.Aux = OpARM64LessThanU 37486 v2 := b.NewValue0(v.Pos, 
OpZeroExt8to64, typ.UInt64) 37487 v2.AddArg(y) 37488 v1.AddArg(v2) 37489 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37490 v3.AuxInt = 63 37491 v1.AddArg(v3) 37492 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37493 v4.AuxInt = 64 37494 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37495 v5.AddArg(y) 37496 v4.AddArg(v5) 37497 v1.AddArg(v4) 37498 v.AddArg(v1) 37499 return true 37500 } 37501 } 37502 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool { 37503 // match: (SignExt16to32 x) 37504 // cond: 37505 // result: (MOVHreg x) 37506 for { 37507 x := v.Args[0] 37508 v.reset(OpARM64MOVHreg) 37509 v.AddArg(x) 37510 return true 37511 } 37512 } 37513 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool { 37514 // match: (SignExt16to64 x) 37515 // cond: 37516 // result: (MOVHreg x) 37517 for { 37518 x := v.Args[0] 37519 v.reset(OpARM64MOVHreg) 37520 v.AddArg(x) 37521 return true 37522 } 37523 } 37524 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool { 37525 // match: (SignExt32to64 x) 37526 // cond: 37527 // result: (MOVWreg x) 37528 for { 37529 x := v.Args[0] 37530 v.reset(OpARM64MOVWreg) 37531 v.AddArg(x) 37532 return true 37533 } 37534 } 37535 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool { 37536 // match: (SignExt8to16 x) 37537 // cond: 37538 // result: (MOVBreg x) 37539 for { 37540 x := v.Args[0] 37541 v.reset(OpARM64MOVBreg) 37542 v.AddArg(x) 37543 return true 37544 } 37545 } 37546 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool { 37547 // match: (SignExt8to32 x) 37548 // cond: 37549 // result: (MOVBreg x) 37550 for { 37551 x := v.Args[0] 37552 v.reset(OpARM64MOVBreg) 37553 v.AddArg(x) 37554 return true 37555 } 37556 } 37557 func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool { 37558 // match: (SignExt8to64 x) 37559 // cond: 37560 // result: (MOVBreg x) 37561 for { 37562 x := v.Args[0] 37563 v.reset(OpARM64MOVBreg) 37564 v.AddArg(x) 37565 return true 37566 } 37567 } 37568 func rewriteValueARM64_OpSlicemask_0(v 
*Value) bool { 37569 b := v.Block 37570 _ = b 37571 // match: (Slicemask <t> x) 37572 // cond: 37573 // result: (SRAconst (NEG <t> x) [63]) 37574 for { 37575 t := v.Type 37576 x := v.Args[0] 37577 v.reset(OpARM64SRAconst) 37578 v.AuxInt = 63 37579 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 37580 v0.AddArg(x) 37581 v.AddArg(v0) 37582 return true 37583 } 37584 } 37585 func rewriteValueARM64_OpSqrt_0(v *Value) bool { 37586 // match: (Sqrt x) 37587 // cond: 37588 // result: (FSQRTD x) 37589 for { 37590 x := v.Args[0] 37591 v.reset(OpARM64FSQRTD) 37592 v.AddArg(x) 37593 return true 37594 } 37595 } 37596 func rewriteValueARM64_OpStaticCall_0(v *Value) bool { 37597 // match: (StaticCall [argwid] {target} mem) 37598 // cond: 37599 // result: (CALLstatic [argwid] {target} mem) 37600 for { 37601 argwid := v.AuxInt 37602 target := v.Aux 37603 mem := v.Args[0] 37604 v.reset(OpARM64CALLstatic) 37605 v.AuxInt = argwid 37606 v.Aux = target 37607 v.AddArg(mem) 37608 return true 37609 } 37610 } 37611 func rewriteValueARM64_OpStore_0(v *Value) bool { 37612 // match: (Store {t} ptr val mem) 37613 // cond: t.(*types.Type).Size() == 1 37614 // result: (MOVBstore ptr val mem) 37615 for { 37616 t := v.Aux 37617 _ = v.Args[2] 37618 ptr := v.Args[0] 37619 val := v.Args[1] 37620 mem := v.Args[2] 37621 if !(t.(*types.Type).Size() == 1) { 37622 break 37623 } 37624 v.reset(OpARM64MOVBstore) 37625 v.AddArg(ptr) 37626 v.AddArg(val) 37627 v.AddArg(mem) 37628 return true 37629 } 37630 // match: (Store {t} ptr val mem) 37631 // cond: t.(*types.Type).Size() == 2 37632 // result: (MOVHstore ptr val mem) 37633 for { 37634 t := v.Aux 37635 _ = v.Args[2] 37636 ptr := v.Args[0] 37637 val := v.Args[1] 37638 mem := v.Args[2] 37639 if !(t.(*types.Type).Size() == 2) { 37640 break 37641 } 37642 v.reset(OpARM64MOVHstore) 37643 v.AddArg(ptr) 37644 v.AddArg(val) 37645 v.AddArg(mem) 37646 return true 37647 } 37648 // match: (Store {t} ptr val mem) 37649 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 
37650 // result: (MOVWstore ptr val mem) 37651 for { 37652 t := v.Aux 37653 _ = v.Args[2] 37654 ptr := v.Args[0] 37655 val := v.Args[1] 37656 mem := v.Args[2] 37657 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 37658 break 37659 } 37660 v.reset(OpARM64MOVWstore) 37661 v.AddArg(ptr) 37662 v.AddArg(val) 37663 v.AddArg(mem) 37664 return true 37665 } 37666 // match: (Store {t} ptr val mem) 37667 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 37668 // result: (MOVDstore ptr val mem) 37669 for { 37670 t := v.Aux 37671 _ = v.Args[2] 37672 ptr := v.Args[0] 37673 val := v.Args[1] 37674 mem := v.Args[2] 37675 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 37676 break 37677 } 37678 v.reset(OpARM64MOVDstore) 37679 v.AddArg(ptr) 37680 v.AddArg(val) 37681 v.AddArg(mem) 37682 return true 37683 } 37684 // match: (Store {t} ptr val mem) 37685 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 37686 // result: (FMOVSstore ptr val mem) 37687 for { 37688 t := v.Aux 37689 _ = v.Args[2] 37690 ptr := v.Args[0] 37691 val := v.Args[1] 37692 mem := v.Args[2] 37693 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 37694 break 37695 } 37696 v.reset(OpARM64FMOVSstore) 37697 v.AddArg(ptr) 37698 v.AddArg(val) 37699 v.AddArg(mem) 37700 return true 37701 } 37702 // match: (Store {t} ptr val mem) 37703 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 37704 // result: (FMOVDstore ptr val mem) 37705 for { 37706 t := v.Aux 37707 _ = v.Args[2] 37708 ptr := v.Args[0] 37709 val := v.Args[1] 37710 mem := v.Args[2] 37711 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 37712 break 37713 } 37714 v.reset(OpARM64FMOVDstore) 37715 v.AddArg(ptr) 37716 v.AddArg(val) 37717 v.AddArg(mem) 37718 return true 37719 } 37720 return false 37721 } 37722 func rewriteValueARM64_OpSub16_0(v *Value) bool { 37723 // match: (Sub16 x y) 37724 // cond: 37725 // result: (SUB x y) 37726 for { 37727 _ = v.Args[1] 37728 x := v.Args[0] 
37729 y := v.Args[1] 37730 v.reset(OpARM64SUB) 37731 v.AddArg(x) 37732 v.AddArg(y) 37733 return true 37734 } 37735 } 37736 func rewriteValueARM64_OpSub32_0(v *Value) bool { 37737 // match: (Sub32 x y) 37738 // cond: 37739 // result: (SUB x y) 37740 for { 37741 _ = v.Args[1] 37742 x := v.Args[0] 37743 y := v.Args[1] 37744 v.reset(OpARM64SUB) 37745 v.AddArg(x) 37746 v.AddArg(y) 37747 return true 37748 } 37749 } 37750 func rewriteValueARM64_OpSub32F_0(v *Value) bool { 37751 // match: (Sub32F x y) 37752 // cond: 37753 // result: (FSUBS x y) 37754 for { 37755 _ = v.Args[1] 37756 x := v.Args[0] 37757 y := v.Args[1] 37758 v.reset(OpARM64FSUBS) 37759 v.AddArg(x) 37760 v.AddArg(y) 37761 return true 37762 } 37763 } 37764 func rewriteValueARM64_OpSub64_0(v *Value) bool { 37765 // match: (Sub64 x y) 37766 // cond: 37767 // result: (SUB x y) 37768 for { 37769 _ = v.Args[1] 37770 x := v.Args[0] 37771 y := v.Args[1] 37772 v.reset(OpARM64SUB) 37773 v.AddArg(x) 37774 v.AddArg(y) 37775 return true 37776 } 37777 } 37778 func rewriteValueARM64_OpSub64F_0(v *Value) bool { 37779 // match: (Sub64F x y) 37780 // cond: 37781 // result: (FSUBD x y) 37782 for { 37783 _ = v.Args[1] 37784 x := v.Args[0] 37785 y := v.Args[1] 37786 v.reset(OpARM64FSUBD) 37787 v.AddArg(x) 37788 v.AddArg(y) 37789 return true 37790 } 37791 } 37792 func rewriteValueARM64_OpSub8_0(v *Value) bool { 37793 // match: (Sub8 x y) 37794 // cond: 37795 // result: (SUB x y) 37796 for { 37797 _ = v.Args[1] 37798 x := v.Args[0] 37799 y := v.Args[1] 37800 v.reset(OpARM64SUB) 37801 v.AddArg(x) 37802 v.AddArg(y) 37803 return true 37804 } 37805 } 37806 func rewriteValueARM64_OpSubPtr_0(v *Value) bool { 37807 // match: (SubPtr x y) 37808 // cond: 37809 // result: (SUB x y) 37810 for { 37811 _ = v.Args[1] 37812 x := v.Args[0] 37813 y := v.Args[1] 37814 v.reset(OpARM64SUB) 37815 v.AddArg(x) 37816 v.AddArg(y) 37817 return true 37818 } 37819 } 37820 func rewriteValueARM64_OpTrunc_0(v *Value) bool { 37821 // match: (Trunc x) 37822 // 
cond: 37823 // result: (FRINTZD x) 37824 for { 37825 x := v.Args[0] 37826 v.reset(OpARM64FRINTZD) 37827 v.AddArg(x) 37828 return true 37829 } 37830 } 37831 func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool { 37832 // match: (Trunc16to8 x) 37833 // cond: 37834 // result: x 37835 for { 37836 x := v.Args[0] 37837 v.reset(OpCopy) 37838 v.Type = x.Type 37839 v.AddArg(x) 37840 return true 37841 } 37842 } 37843 func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool { 37844 // match: (Trunc32to16 x) 37845 // cond: 37846 // result: x 37847 for { 37848 x := v.Args[0] 37849 v.reset(OpCopy) 37850 v.Type = x.Type 37851 v.AddArg(x) 37852 return true 37853 } 37854 } 37855 func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool { 37856 // match: (Trunc32to8 x) 37857 // cond: 37858 // result: x 37859 for { 37860 x := v.Args[0] 37861 v.reset(OpCopy) 37862 v.Type = x.Type 37863 v.AddArg(x) 37864 return true 37865 } 37866 } 37867 func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool { 37868 // match: (Trunc64to16 x) 37869 // cond: 37870 // result: x 37871 for { 37872 x := v.Args[0] 37873 v.reset(OpCopy) 37874 v.Type = x.Type 37875 v.AddArg(x) 37876 return true 37877 } 37878 } 37879 func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool { 37880 // match: (Trunc64to32 x) 37881 // cond: 37882 // result: x 37883 for { 37884 x := v.Args[0] 37885 v.reset(OpCopy) 37886 v.Type = x.Type 37887 v.AddArg(x) 37888 return true 37889 } 37890 } 37891 func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool { 37892 // match: (Trunc64to8 x) 37893 // cond: 37894 // result: x 37895 for { 37896 x := v.Args[0] 37897 v.reset(OpCopy) 37898 v.Type = x.Type 37899 v.AddArg(x) 37900 return true 37901 } 37902 } 37903 func rewriteValueARM64_OpWB_0(v *Value) bool { 37904 // match: (WB {fn} destptr srcptr mem) 37905 // cond: 37906 // result: (LoweredWB {fn} destptr srcptr mem) 37907 for { 37908 fn := v.Aux 37909 _ = v.Args[2] 37910 destptr := v.Args[0] 37911 srcptr := v.Args[1] 37912 mem := v.Args[2] 37913 
v.reset(OpARM64LoweredWB) 37914 v.Aux = fn 37915 v.AddArg(destptr) 37916 v.AddArg(srcptr) 37917 v.AddArg(mem) 37918 return true 37919 } 37920 } 37921 func rewriteValueARM64_OpXor16_0(v *Value) bool { 37922 // match: (Xor16 x y) 37923 // cond: 37924 // result: (XOR x y) 37925 for { 37926 _ = v.Args[1] 37927 x := v.Args[0] 37928 y := v.Args[1] 37929 v.reset(OpARM64XOR) 37930 v.AddArg(x) 37931 v.AddArg(y) 37932 return true 37933 } 37934 } 37935 func rewriteValueARM64_OpXor32_0(v *Value) bool { 37936 // match: (Xor32 x y) 37937 // cond: 37938 // result: (XOR x y) 37939 for { 37940 _ = v.Args[1] 37941 x := v.Args[0] 37942 y := v.Args[1] 37943 v.reset(OpARM64XOR) 37944 v.AddArg(x) 37945 v.AddArg(y) 37946 return true 37947 } 37948 } 37949 func rewriteValueARM64_OpXor64_0(v *Value) bool { 37950 // match: (Xor64 x y) 37951 // cond: 37952 // result: (XOR x y) 37953 for { 37954 _ = v.Args[1] 37955 x := v.Args[0] 37956 y := v.Args[1] 37957 v.reset(OpARM64XOR) 37958 v.AddArg(x) 37959 v.AddArg(y) 37960 return true 37961 } 37962 } 37963 func rewriteValueARM64_OpXor8_0(v *Value) bool { 37964 // match: (Xor8 x y) 37965 // cond: 37966 // result: (XOR x y) 37967 for { 37968 _ = v.Args[1] 37969 x := v.Args[0] 37970 y := v.Args[1] 37971 v.reset(OpARM64XOR) 37972 v.AddArg(x) 37973 v.AddArg(y) 37974 return true 37975 } 37976 } 37977 func rewriteValueARM64_OpZero_0(v *Value) bool { 37978 b := v.Block 37979 _ = b 37980 typ := &b.Func.Config.Types 37981 _ = typ 37982 // match: (Zero [0] _ mem) 37983 // cond: 37984 // result: mem 37985 for { 37986 if v.AuxInt != 0 { 37987 break 37988 } 37989 _ = v.Args[1] 37990 mem := v.Args[1] 37991 v.reset(OpCopy) 37992 v.Type = mem.Type 37993 v.AddArg(mem) 37994 return true 37995 } 37996 // match: (Zero [1] ptr mem) 37997 // cond: 37998 // result: (MOVBstore ptr (MOVDconst [0]) mem) 37999 for { 38000 if v.AuxInt != 1 { 38001 break 38002 } 38003 _ = v.Args[1] 38004 ptr := v.Args[0] 38005 mem := v.Args[1] 38006 v.reset(OpARM64MOVBstore) 38007 v.AddArg(ptr) 
38008 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38009 v0.AuxInt = 0 38010 v.AddArg(v0) 38011 v.AddArg(mem) 38012 return true 38013 } 38014 // match: (Zero [2] ptr mem) 38015 // cond: 38016 // result: (MOVHstore ptr (MOVDconst [0]) mem) 38017 for { 38018 if v.AuxInt != 2 { 38019 break 38020 } 38021 _ = v.Args[1] 38022 ptr := v.Args[0] 38023 mem := v.Args[1] 38024 v.reset(OpARM64MOVHstore) 38025 v.AddArg(ptr) 38026 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38027 v0.AuxInt = 0 38028 v.AddArg(v0) 38029 v.AddArg(mem) 38030 return true 38031 } 38032 // match: (Zero [4] ptr mem) 38033 // cond: 38034 // result: (MOVWstore ptr (MOVDconst [0]) mem) 38035 for { 38036 if v.AuxInt != 4 { 38037 break 38038 } 38039 _ = v.Args[1] 38040 ptr := v.Args[0] 38041 mem := v.Args[1] 38042 v.reset(OpARM64MOVWstore) 38043 v.AddArg(ptr) 38044 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38045 v0.AuxInt = 0 38046 v.AddArg(v0) 38047 v.AddArg(mem) 38048 return true 38049 } 38050 // match: (Zero [8] ptr mem) 38051 // cond: 38052 // result: (MOVDstore ptr (MOVDconst [0]) mem) 38053 for { 38054 if v.AuxInt != 8 { 38055 break 38056 } 38057 _ = v.Args[1] 38058 ptr := v.Args[0] 38059 mem := v.Args[1] 38060 v.reset(OpARM64MOVDstore) 38061 v.AddArg(ptr) 38062 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38063 v0.AuxInt = 0 38064 v.AddArg(v0) 38065 v.AddArg(mem) 38066 return true 38067 } 38068 // match: (Zero [3] ptr mem) 38069 // cond: 38070 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 38071 for { 38072 if v.AuxInt != 3 { 38073 break 38074 } 38075 _ = v.Args[1] 38076 ptr := v.Args[0] 38077 mem := v.Args[1] 38078 v.reset(OpARM64MOVBstore) 38079 v.AuxInt = 2 38080 v.AddArg(ptr) 38081 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38082 v0.AuxInt = 0 38083 v.AddArg(v0) 38084 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38085 v1.AddArg(ptr) 38086 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 
38087 v2.AuxInt = 0 38088 v1.AddArg(v2) 38089 v1.AddArg(mem) 38090 v.AddArg(v1) 38091 return true 38092 } 38093 // match: (Zero [5] ptr mem) 38094 // cond: 38095 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 38096 for { 38097 if v.AuxInt != 5 { 38098 break 38099 } 38100 _ = v.Args[1] 38101 ptr := v.Args[0] 38102 mem := v.Args[1] 38103 v.reset(OpARM64MOVBstore) 38104 v.AuxInt = 4 38105 v.AddArg(ptr) 38106 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38107 v0.AuxInt = 0 38108 v.AddArg(v0) 38109 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38110 v1.AddArg(ptr) 38111 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38112 v2.AuxInt = 0 38113 v1.AddArg(v2) 38114 v1.AddArg(mem) 38115 v.AddArg(v1) 38116 return true 38117 } 38118 // match: (Zero [6] ptr mem) 38119 // cond: 38120 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 38121 for { 38122 if v.AuxInt != 6 { 38123 break 38124 } 38125 _ = v.Args[1] 38126 ptr := v.Args[0] 38127 mem := v.Args[1] 38128 v.reset(OpARM64MOVHstore) 38129 v.AuxInt = 4 38130 v.AddArg(ptr) 38131 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38132 v0.AuxInt = 0 38133 v.AddArg(v0) 38134 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38135 v1.AddArg(ptr) 38136 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38137 v2.AuxInt = 0 38138 v1.AddArg(v2) 38139 v1.AddArg(mem) 38140 v.AddArg(v1) 38141 return true 38142 } 38143 // match: (Zero [7] ptr mem) 38144 // cond: 38145 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 38146 for { 38147 if v.AuxInt != 7 { 38148 break 38149 } 38150 _ = v.Args[1] 38151 ptr := v.Args[0] 38152 mem := v.Args[1] 38153 v.reset(OpARM64MOVBstore) 38154 v.AuxInt = 6 38155 v.AddArg(ptr) 38156 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38157 v0.AuxInt = 0 38158 v.AddArg(v0) 38159 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, 
types.TypeMem) 38160 v1.AuxInt = 4 38161 v1.AddArg(ptr) 38162 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38163 v2.AuxInt = 0 38164 v1.AddArg(v2) 38165 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38166 v3.AddArg(ptr) 38167 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38168 v4.AuxInt = 0 38169 v3.AddArg(v4) 38170 v3.AddArg(mem) 38171 v1.AddArg(v3) 38172 v.AddArg(v1) 38173 return true 38174 } 38175 // match: (Zero [9] ptr mem) 38176 // cond: 38177 // result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38178 for { 38179 if v.AuxInt != 9 { 38180 break 38181 } 38182 _ = v.Args[1] 38183 ptr := v.Args[0] 38184 mem := v.Args[1] 38185 v.reset(OpARM64MOVBstore) 38186 v.AuxInt = 8 38187 v.AddArg(ptr) 38188 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38189 v0.AuxInt = 0 38190 v.AddArg(v0) 38191 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38192 v1.AddArg(ptr) 38193 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38194 v2.AuxInt = 0 38195 v1.AddArg(v2) 38196 v1.AddArg(mem) 38197 v.AddArg(v1) 38198 return true 38199 } 38200 return false 38201 } 38202 func rewriteValueARM64_OpZero_10(v *Value) bool { 38203 b := v.Block 38204 _ = b 38205 typ := &b.Func.Config.Types 38206 _ = typ 38207 // match: (Zero [10] ptr mem) 38208 // cond: 38209 // result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38210 for { 38211 if v.AuxInt != 10 { 38212 break 38213 } 38214 _ = v.Args[1] 38215 ptr := v.Args[0] 38216 mem := v.Args[1] 38217 v.reset(OpARM64MOVHstore) 38218 v.AuxInt = 8 38219 v.AddArg(ptr) 38220 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38221 v0.AuxInt = 0 38222 v.AddArg(v0) 38223 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38224 v1.AddArg(ptr) 38225 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38226 v2.AuxInt = 0 38227 v1.AddArg(v2) 38228 v1.AddArg(mem) 38229 v.AddArg(v1) 38230 return true 38231 } 38232 // match: (Zero [11] ptr mem) 
38233 // cond: 38234 // result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38235 for { 38236 if v.AuxInt != 11 { 38237 break 38238 } 38239 _ = v.Args[1] 38240 ptr := v.Args[0] 38241 mem := v.Args[1] 38242 v.reset(OpARM64MOVBstore) 38243 v.AuxInt = 10 38244 v.AddArg(ptr) 38245 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38246 v0.AuxInt = 0 38247 v.AddArg(v0) 38248 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38249 v1.AuxInt = 8 38250 v1.AddArg(ptr) 38251 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38252 v2.AuxInt = 0 38253 v1.AddArg(v2) 38254 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38255 v3.AddArg(ptr) 38256 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38257 v4.AuxInt = 0 38258 v3.AddArg(v4) 38259 v3.AddArg(mem) 38260 v1.AddArg(v3) 38261 v.AddArg(v1) 38262 return true 38263 } 38264 // match: (Zero [12] ptr mem) 38265 // cond: 38266 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38267 for { 38268 if v.AuxInt != 12 { 38269 break 38270 } 38271 _ = v.Args[1] 38272 ptr := v.Args[0] 38273 mem := v.Args[1] 38274 v.reset(OpARM64MOVWstore) 38275 v.AuxInt = 8 38276 v.AddArg(ptr) 38277 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38278 v0.AuxInt = 0 38279 v.AddArg(v0) 38280 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38281 v1.AddArg(ptr) 38282 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38283 v2.AuxInt = 0 38284 v1.AddArg(v2) 38285 v1.AddArg(mem) 38286 v.AddArg(v1) 38287 return true 38288 } 38289 // match: (Zero [13] ptr mem) 38290 // cond: 38291 // result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38292 for { 38293 if v.AuxInt != 13 { 38294 break 38295 } 38296 _ = v.Args[1] 38297 ptr := v.Args[0] 38298 mem := v.Args[1] 38299 v.reset(OpARM64MOVBstore) 38300 v.AuxInt = 12 38301 v.AddArg(ptr) 38302 v0 := 
b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38303 v0.AuxInt = 0 38304 v.AddArg(v0) 38305 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38306 v1.AuxInt = 8 38307 v1.AddArg(ptr) 38308 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38309 v2.AuxInt = 0 38310 v1.AddArg(v2) 38311 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38312 v3.AddArg(ptr) 38313 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38314 v4.AuxInt = 0 38315 v3.AddArg(v4) 38316 v3.AddArg(mem) 38317 v1.AddArg(v3) 38318 v.AddArg(v1) 38319 return true 38320 } 38321 // match: (Zero [14] ptr mem) 38322 // cond: 38323 // result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38324 for { 38325 if v.AuxInt != 14 { 38326 break 38327 } 38328 _ = v.Args[1] 38329 ptr := v.Args[0] 38330 mem := v.Args[1] 38331 v.reset(OpARM64MOVHstore) 38332 v.AuxInt = 12 38333 v.AddArg(ptr) 38334 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38335 v0.AuxInt = 0 38336 v.AddArg(v0) 38337 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38338 v1.AuxInt = 8 38339 v1.AddArg(ptr) 38340 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38341 v2.AuxInt = 0 38342 v1.AddArg(v2) 38343 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38344 v3.AddArg(ptr) 38345 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38346 v4.AuxInt = 0 38347 v3.AddArg(v4) 38348 v3.AddArg(mem) 38349 v1.AddArg(v3) 38350 v.AddArg(v1) 38351 return true 38352 } 38353 // match: (Zero [15] ptr mem) 38354 // cond: 38355 // result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))) 38356 for { 38357 if v.AuxInt != 15 { 38358 break 38359 } 38360 _ = v.Args[1] 38361 ptr := v.Args[0] 38362 mem := v.Args[1] 38363 v.reset(OpARM64MOVBstore) 38364 v.AuxInt = 14 38365 v.AddArg(ptr) 38366 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38367 
v0.AuxInt = 0 38368 v.AddArg(v0) 38369 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38370 v1.AuxInt = 12 38371 v1.AddArg(ptr) 38372 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38373 v2.AuxInt = 0 38374 v1.AddArg(v2) 38375 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38376 v3.AuxInt = 8 38377 v3.AddArg(ptr) 38378 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38379 v4.AuxInt = 0 38380 v3.AddArg(v4) 38381 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38382 v5.AddArg(ptr) 38383 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38384 v6.AuxInt = 0 38385 v5.AddArg(v6) 38386 v5.AddArg(mem) 38387 v3.AddArg(v5) 38388 v1.AddArg(v3) 38389 v.AddArg(v1) 38390 return true 38391 } 38392 // match: (Zero [16] ptr mem) 38393 // cond: 38394 // result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem) 38395 for { 38396 if v.AuxInt != 16 { 38397 break 38398 } 38399 _ = v.Args[1] 38400 ptr := v.Args[0] 38401 mem := v.Args[1] 38402 v.reset(OpARM64STP) 38403 v.AuxInt = 0 38404 v.AddArg(ptr) 38405 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38406 v0.AuxInt = 0 38407 v.AddArg(v0) 38408 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38409 v1.AuxInt = 0 38410 v.AddArg(v1) 38411 v.AddArg(mem) 38412 return true 38413 } 38414 // match: (Zero [32] ptr mem) 38415 // cond: 38416 // result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)) 38417 for { 38418 if v.AuxInt != 32 { 38419 break 38420 } 38421 _ = v.Args[1] 38422 ptr := v.Args[0] 38423 mem := v.Args[1] 38424 v.reset(OpARM64STP) 38425 v.AuxInt = 16 38426 v.AddArg(ptr) 38427 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38428 v0.AuxInt = 0 38429 v.AddArg(v0) 38430 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38431 v1.AuxInt = 0 38432 v.AddArg(v1) 38433 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38434 v2.AuxInt = 0 38435 v2.AddArg(ptr) 38436 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, 
typ.UInt64) 38437 v3.AuxInt = 0 38438 v2.AddArg(v3) 38439 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38440 v4.AuxInt = 0 38441 v2.AddArg(v4) 38442 v2.AddArg(mem) 38443 v.AddArg(v2) 38444 return true 38445 } 38446 // match: (Zero [48] ptr mem) 38447 // cond: 38448 // result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))) 38449 for { 38450 if v.AuxInt != 48 { 38451 break 38452 } 38453 _ = v.Args[1] 38454 ptr := v.Args[0] 38455 mem := v.Args[1] 38456 v.reset(OpARM64STP) 38457 v.AuxInt = 32 38458 v.AddArg(ptr) 38459 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38460 v0.AuxInt = 0 38461 v.AddArg(v0) 38462 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38463 v1.AuxInt = 0 38464 v.AddArg(v1) 38465 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38466 v2.AuxInt = 16 38467 v2.AddArg(ptr) 38468 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38469 v3.AuxInt = 0 38470 v2.AddArg(v3) 38471 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38472 v4.AuxInt = 0 38473 v2.AddArg(v4) 38474 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38475 v5.AuxInt = 0 38476 v5.AddArg(ptr) 38477 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38478 v6.AuxInt = 0 38479 v5.AddArg(v6) 38480 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38481 v7.AuxInt = 0 38482 v5.AddArg(v7) 38483 v5.AddArg(mem) 38484 v2.AddArg(v5) 38485 v.AddArg(v2) 38486 return true 38487 } 38488 // match: (Zero [64] ptr mem) 38489 // cond: 38490 // result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))) 38491 for { 38492 if v.AuxInt != 64 { 38493 break 38494 } 38495 _ = v.Args[1] 38496 ptr := v.Args[0] 38497 mem := v.Args[1] 38498 v.reset(OpARM64STP) 38499 v.AuxInt = 48 38500 v.AddArg(ptr) 38501 v0 := b.NewValue0(v.Pos, 
OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v2.AuxInt = 32
		v2.AddArg(ptr)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v3.AuxInt = 0
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v4.AuxInt = 0
		v2.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v5.AuxInt = 16
		v5.AddArg(ptr)
		v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v6.AuxInt = 0
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v7.AuxInt = 0
		v5.AddArg(v7)
		v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
		v8.AuxInt = 0
		v8.AddArg(ptr)
		v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v9.AuxInt = 0
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v10.AuxInt = 0
		v8.AddArg(v10)
		v8.AddArg(mem)
		v5.AddArg(v8)
		v2.AddArg(v5)
		v.AddArg(v2)
		return true
	}
	return false
}

// rewriteValueARM64_OpZero_20 handles the residual Zero sizes: a size over 16
// that is not a multiple of 16 is split into a small (possibly overlapping)
// tail zeroing plus an aligned body, and a large multiple of 16 becomes
// either a DUFFZERO or the generic zeroing loop.
func rewriteValueARM64_OpZero_20(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 <= 8 && s > 16
	// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - 8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s - s%16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 > 8 && s > 16
	// result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem))
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 != 0 && s%16 > 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s - s%16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (64 - s/16)] ptr mem)
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		// Offset into the Duff's-device routine: 4 bytes of code per
		// 16-byte chunk, entering so that exactly s/16 chunks run.
		v.AuxInt = 4 * (64 - s/16)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpZeroExt16to32_0 lowers ZeroExt16to32 to the ARM64
// MOVHUreg (zero-extend halfword) instruction.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		src := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(src)
		return true
	}
}
38645 func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool { 38646 // match: (ZeroExt16to64 x) 38647 // cond: 38648 // result: (MOVHUreg x) 38649 for { 38650 x := v.Args[0] 38651 v.reset(OpARM64MOVHUreg) 38652 v.AddArg(x) 38653 return true 38654 } 38655 } 38656 func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool { 38657 // match: (ZeroExt32to64 x) 38658 // cond: 38659 // result: (MOVWUreg x) 38660 for { 38661 x := v.Args[0] 38662 v.reset(OpARM64MOVWUreg) 38663 v.AddArg(x) 38664 return true 38665 } 38666 } 38667 func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool { 38668 // match: (ZeroExt8to16 x) 38669 // cond: 38670 // result: (MOVBUreg x) 38671 for { 38672 x := v.Args[0] 38673 v.reset(OpARM64MOVBUreg) 38674 v.AddArg(x) 38675 return true 38676 } 38677 } 38678 func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool { 38679 // match: (ZeroExt8to32 x) 38680 // cond: 38681 // result: (MOVBUreg x) 38682 for { 38683 x := v.Args[0] 38684 v.reset(OpARM64MOVBUreg) 38685 v.AddArg(x) 38686 return true 38687 } 38688 } 38689 func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool { 38690 // match: (ZeroExt8to64 x) 38691 // cond: 38692 // result: (MOVBUreg x) 38693 for { 38694 x := v.Args[0] 38695 v.reset(OpARM64MOVBUreg) 38696 v.AddArg(x) 38697 return true 38698 } 38699 } 38700 func rewriteBlockARM64(b *Block) bool { 38701 config := b.Func.Config 38702 _ = config 38703 fe := b.Func.fe 38704 _ = fe 38705 typ := &config.Types 38706 _ = typ 38707 switch b.Kind { 38708 case BlockARM64EQ: 38709 // match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no) 38710 // cond: x.Uses == 1 38711 // result: (EQ (TSTWconst [c] y) yes no) 38712 for { 38713 v := b.Control 38714 if v.Op != OpARM64CMPWconst { 38715 break 38716 } 38717 if v.AuxInt != 0 { 38718 break 38719 } 38720 x := v.Args[0] 38721 if x.Op != OpARM64ANDconst { 38722 break 38723 } 38724 c := x.AuxInt 38725 y := x.Args[0] 38726 if !(x.Uses == 1) { 38727 break 38728 } 38729 b.Kind = BlockARM64EQ 38730 v0 := b.NewValue0(v.Pos, 
OpARM64TSTWconst, types.TypeFlags) 38731 v0.AuxInt = c 38732 v0.AddArg(y) 38733 b.SetControl(v0) 38734 b.Aux = nil 38735 return true 38736 } 38737 // match: (EQ (CMPconst [0] z:(AND x y)) yes no) 38738 // cond: z.Uses == 1 38739 // result: (EQ (TST x y) yes no) 38740 for { 38741 v := b.Control 38742 if v.Op != OpARM64CMPconst { 38743 break 38744 } 38745 if v.AuxInt != 0 { 38746 break 38747 } 38748 z := v.Args[0] 38749 if z.Op != OpARM64AND { 38750 break 38751 } 38752 _ = z.Args[1] 38753 x := z.Args[0] 38754 y := z.Args[1] 38755 if !(z.Uses == 1) { 38756 break 38757 } 38758 b.Kind = BlockARM64EQ 38759 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 38760 v0.AddArg(x) 38761 v0.AddArg(y) 38762 b.SetControl(v0) 38763 b.Aux = nil 38764 return true 38765 } 38766 // match: (EQ (CMPWconst [0] z:(AND x y)) yes no) 38767 // cond: z.Uses == 1 38768 // result: (EQ (TSTW x y) yes no) 38769 for { 38770 v := b.Control 38771 if v.Op != OpARM64CMPWconst { 38772 break 38773 } 38774 if v.AuxInt != 0 { 38775 break 38776 } 38777 z := v.Args[0] 38778 if z.Op != OpARM64AND { 38779 break 38780 } 38781 _ = z.Args[1] 38782 x := z.Args[0] 38783 y := z.Args[1] 38784 if !(z.Uses == 1) { 38785 break 38786 } 38787 b.Kind = BlockARM64EQ 38788 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 38789 v0.AddArg(x) 38790 v0.AddArg(y) 38791 b.SetControl(v0) 38792 b.Aux = nil 38793 return true 38794 } 38795 // match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no) 38796 // cond: x.Uses == 1 38797 // result: (EQ (TSTconst [c] y) yes no) 38798 for { 38799 v := b.Control 38800 if v.Op != OpARM64CMPconst { 38801 break 38802 } 38803 if v.AuxInt != 0 { 38804 break 38805 } 38806 x := v.Args[0] 38807 if x.Op != OpARM64ANDconst { 38808 break 38809 } 38810 c := x.AuxInt 38811 y := x.Args[0] 38812 if !(x.Uses == 1) { 38813 break 38814 } 38815 b.Kind = BlockARM64EQ 38816 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 38817 v0.AuxInt = c 38818 v0.AddArg(y) 38819 b.SetControl(v0) 38820 b.Aux = 
nil 38821 return true 38822 } 38823 // match: (EQ (CMPconst [0] x:(ADDconst [c] y)) yes no) 38824 // cond: x.Uses == 1 38825 // result: (EQ (CMNconst [c] y) yes no) 38826 for { 38827 v := b.Control 38828 if v.Op != OpARM64CMPconst { 38829 break 38830 } 38831 if v.AuxInt != 0 { 38832 break 38833 } 38834 x := v.Args[0] 38835 if x.Op != OpARM64ADDconst { 38836 break 38837 } 38838 c := x.AuxInt 38839 y := x.Args[0] 38840 if !(x.Uses == 1) { 38841 break 38842 } 38843 b.Kind = BlockARM64EQ 38844 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 38845 v0.AuxInt = c 38846 v0.AddArg(y) 38847 b.SetControl(v0) 38848 b.Aux = nil 38849 return true 38850 } 38851 // match: (EQ (CMPWconst [0] x:(ADDconst [c] y)) yes no) 38852 // cond: x.Uses == 1 38853 // result: (EQ (CMNWconst [c] y) yes no) 38854 for { 38855 v := b.Control 38856 if v.Op != OpARM64CMPWconst { 38857 break 38858 } 38859 if v.AuxInt != 0 { 38860 break 38861 } 38862 x := v.Args[0] 38863 if x.Op != OpARM64ADDconst { 38864 break 38865 } 38866 c := x.AuxInt 38867 y := x.Args[0] 38868 if !(x.Uses == 1) { 38869 break 38870 } 38871 b.Kind = BlockARM64EQ 38872 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 38873 v0.AuxInt = c 38874 v0.AddArg(y) 38875 b.SetControl(v0) 38876 b.Aux = nil 38877 return true 38878 } 38879 // match: (EQ (CMPconst [0] z:(ADD x y)) yes no) 38880 // cond: z.Uses == 1 38881 // result: (EQ (CMN x y) yes no) 38882 for { 38883 v := b.Control 38884 if v.Op != OpARM64CMPconst { 38885 break 38886 } 38887 if v.AuxInt != 0 { 38888 break 38889 } 38890 z := v.Args[0] 38891 if z.Op != OpARM64ADD { 38892 break 38893 } 38894 _ = z.Args[1] 38895 x := z.Args[0] 38896 y := z.Args[1] 38897 if !(z.Uses == 1) { 38898 break 38899 } 38900 b.Kind = BlockARM64EQ 38901 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 38902 v0.AddArg(x) 38903 v0.AddArg(y) 38904 b.SetControl(v0) 38905 b.Aux = nil 38906 return true 38907 } 38908 // match: (EQ (CMPWconst [0] z:(ADD x y)) yes no) 38909 // cond: z.Uses 
== 1 38910 // result: (EQ (CMNW x y) yes no) 38911 for { 38912 v := b.Control 38913 if v.Op != OpARM64CMPWconst { 38914 break 38915 } 38916 if v.AuxInt != 0 { 38917 break 38918 } 38919 z := v.Args[0] 38920 if z.Op != OpARM64ADD { 38921 break 38922 } 38923 _ = z.Args[1] 38924 x := z.Args[0] 38925 y := z.Args[1] 38926 if !(z.Uses == 1) { 38927 break 38928 } 38929 b.Kind = BlockARM64EQ 38930 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 38931 v0.AddArg(x) 38932 v0.AddArg(y) 38933 b.SetControl(v0) 38934 b.Aux = nil 38935 return true 38936 } 38937 // match: (EQ (CMP x z:(NEG y)) yes no) 38938 // cond: z.Uses == 1 38939 // result: (EQ (CMN x y) yes no) 38940 for { 38941 v := b.Control 38942 if v.Op != OpARM64CMP { 38943 break 38944 } 38945 _ = v.Args[1] 38946 x := v.Args[0] 38947 z := v.Args[1] 38948 if z.Op != OpARM64NEG { 38949 break 38950 } 38951 y := z.Args[0] 38952 if !(z.Uses == 1) { 38953 break 38954 } 38955 b.Kind = BlockARM64EQ 38956 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 38957 v0.AddArg(x) 38958 v0.AddArg(y) 38959 b.SetControl(v0) 38960 b.Aux = nil 38961 return true 38962 } 38963 // match: (EQ (CMPW x z:(NEG y)) yes no) 38964 // cond: z.Uses == 1 38965 // result: (EQ (CMNW x y) yes no) 38966 for { 38967 v := b.Control 38968 if v.Op != OpARM64CMPW { 38969 break 38970 } 38971 _ = v.Args[1] 38972 x := v.Args[0] 38973 z := v.Args[1] 38974 if z.Op != OpARM64NEG { 38975 break 38976 } 38977 y := z.Args[0] 38978 if !(z.Uses == 1) { 38979 break 38980 } 38981 b.Kind = BlockARM64EQ 38982 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 38983 v0.AddArg(x) 38984 v0.AddArg(y) 38985 b.SetControl(v0) 38986 b.Aux = nil 38987 return true 38988 } 38989 // match: (EQ (CMPconst [0] x) yes no) 38990 // cond: 38991 // result: (Z x yes no) 38992 for { 38993 v := b.Control 38994 if v.Op != OpARM64CMPconst { 38995 break 38996 } 38997 if v.AuxInt != 0 { 38998 break 38999 } 39000 x := v.Args[0] 39001 b.Kind = BlockARM64Z 39002 b.SetControl(x) 39003 b.Aux = 
nil 39004 return true 39005 } 39006 // match: (EQ (CMPWconst [0] x) yes no) 39007 // cond: 39008 // result: (ZW x yes no) 39009 for { 39010 v := b.Control 39011 if v.Op != OpARM64CMPWconst { 39012 break 39013 } 39014 if v.AuxInt != 0 { 39015 break 39016 } 39017 x := v.Args[0] 39018 b.Kind = BlockARM64ZW 39019 b.SetControl(x) 39020 b.Aux = nil 39021 return true 39022 } 39023 // match: (EQ (CMPconst [0] z:(MADD a x y)) yes no) 39024 // cond: z.Uses==1 39025 // result: (EQ (CMN a (MUL <x.Type> x y)) yes no) 39026 for { 39027 v := b.Control 39028 if v.Op != OpARM64CMPconst { 39029 break 39030 } 39031 if v.AuxInt != 0 { 39032 break 39033 } 39034 z := v.Args[0] 39035 if z.Op != OpARM64MADD { 39036 break 39037 } 39038 _ = z.Args[2] 39039 a := z.Args[0] 39040 x := z.Args[1] 39041 y := z.Args[2] 39042 if !(z.Uses == 1) { 39043 break 39044 } 39045 b.Kind = BlockARM64EQ 39046 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39047 v0.AddArg(a) 39048 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39049 v1.AddArg(x) 39050 v1.AddArg(y) 39051 v0.AddArg(v1) 39052 b.SetControl(v0) 39053 b.Aux = nil 39054 return true 39055 } 39056 // match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no) 39057 // cond: z.Uses==1 39058 // result: (EQ (CMP a (MUL <x.Type> x y)) yes no) 39059 for { 39060 v := b.Control 39061 if v.Op != OpARM64CMPconst { 39062 break 39063 } 39064 if v.AuxInt != 0 { 39065 break 39066 } 39067 z := v.Args[0] 39068 if z.Op != OpARM64MSUB { 39069 break 39070 } 39071 _ = z.Args[2] 39072 a := z.Args[0] 39073 x := z.Args[1] 39074 y := z.Args[2] 39075 if !(z.Uses == 1) { 39076 break 39077 } 39078 b.Kind = BlockARM64EQ 39079 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 39080 v0.AddArg(a) 39081 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39082 v1.AddArg(x) 39083 v1.AddArg(y) 39084 v0.AddArg(v1) 39085 b.SetControl(v0) 39086 b.Aux = nil 39087 return true 39088 } 39089 // match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no) 39090 // cond: z.Uses==1 39091 // result: (EQ (CMNW a 
(MULW <x.Type> x y)) yes no) 39092 for { 39093 v := b.Control 39094 if v.Op != OpARM64CMPWconst { 39095 break 39096 } 39097 if v.AuxInt != 0 { 39098 break 39099 } 39100 z := v.Args[0] 39101 if z.Op != OpARM64MADDW { 39102 break 39103 } 39104 _ = z.Args[2] 39105 a := z.Args[0] 39106 x := z.Args[1] 39107 y := z.Args[2] 39108 if !(z.Uses == 1) { 39109 break 39110 } 39111 b.Kind = BlockARM64EQ 39112 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39113 v0.AddArg(a) 39114 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39115 v1.AddArg(x) 39116 v1.AddArg(y) 39117 v0.AddArg(v1) 39118 b.SetControl(v0) 39119 b.Aux = nil 39120 return true 39121 } 39122 // match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no) 39123 // cond: z.Uses==1 39124 // result: (EQ (CMPW a (MULW <x.Type> x y)) yes no) 39125 for { 39126 v := b.Control 39127 if v.Op != OpARM64CMPWconst { 39128 break 39129 } 39130 if v.AuxInt != 0 { 39131 break 39132 } 39133 z := v.Args[0] 39134 if z.Op != OpARM64MSUBW { 39135 break 39136 } 39137 _ = z.Args[2] 39138 a := z.Args[0] 39139 x := z.Args[1] 39140 y := z.Args[2] 39141 if !(z.Uses == 1) { 39142 break 39143 } 39144 b.Kind = BlockARM64EQ 39145 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 39146 v0.AddArg(a) 39147 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39148 v1.AddArg(x) 39149 v1.AddArg(y) 39150 v0.AddArg(v1) 39151 b.SetControl(v0) 39152 b.Aux = nil 39153 return true 39154 } 39155 // match: (EQ (TSTconst [c] x) yes no) 39156 // cond: oneBit(c) 39157 // result: (TBZ {ntz(c)} x yes no) 39158 for { 39159 v := b.Control 39160 if v.Op != OpARM64TSTconst { 39161 break 39162 } 39163 c := v.AuxInt 39164 x := v.Args[0] 39165 if !(oneBit(c)) { 39166 break 39167 } 39168 b.Kind = BlockARM64TBZ 39169 b.SetControl(x) 39170 b.Aux = ntz(c) 39171 return true 39172 } 39173 // match: (EQ (TSTWconst [c] x) yes no) 39174 // cond: oneBit(int64(uint32(c))) 39175 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 39176 for { 39177 v := b.Control 39178 if v.Op != 
OpARM64TSTWconst { 39179 break 39180 } 39181 c := v.AuxInt 39182 x := v.Args[0] 39183 if !(oneBit(int64(uint32(c)))) { 39184 break 39185 } 39186 b.Kind = BlockARM64TBZ 39187 b.SetControl(x) 39188 b.Aux = ntz(int64(uint32(c))) 39189 return true 39190 } 39191 // match: (EQ (FlagEQ) yes no) 39192 // cond: 39193 // result: (First nil yes no) 39194 for { 39195 v := b.Control 39196 if v.Op != OpARM64FlagEQ { 39197 break 39198 } 39199 b.Kind = BlockFirst 39200 b.SetControl(nil) 39201 b.Aux = nil 39202 return true 39203 } 39204 // match: (EQ (FlagLT_ULT) yes no) 39205 // cond: 39206 // result: (First nil no yes) 39207 for { 39208 v := b.Control 39209 if v.Op != OpARM64FlagLT_ULT { 39210 break 39211 } 39212 b.Kind = BlockFirst 39213 b.SetControl(nil) 39214 b.Aux = nil 39215 b.swapSuccessors() 39216 return true 39217 } 39218 // match: (EQ (FlagLT_UGT) yes no) 39219 // cond: 39220 // result: (First nil no yes) 39221 for { 39222 v := b.Control 39223 if v.Op != OpARM64FlagLT_UGT { 39224 break 39225 } 39226 b.Kind = BlockFirst 39227 b.SetControl(nil) 39228 b.Aux = nil 39229 b.swapSuccessors() 39230 return true 39231 } 39232 // match: (EQ (FlagGT_ULT) yes no) 39233 // cond: 39234 // result: (First nil no yes) 39235 for { 39236 v := b.Control 39237 if v.Op != OpARM64FlagGT_ULT { 39238 break 39239 } 39240 b.Kind = BlockFirst 39241 b.SetControl(nil) 39242 b.Aux = nil 39243 b.swapSuccessors() 39244 return true 39245 } 39246 // match: (EQ (FlagGT_UGT) yes no) 39247 // cond: 39248 // result: (First nil no yes) 39249 for { 39250 v := b.Control 39251 if v.Op != OpARM64FlagGT_UGT { 39252 break 39253 } 39254 b.Kind = BlockFirst 39255 b.SetControl(nil) 39256 b.Aux = nil 39257 b.swapSuccessors() 39258 return true 39259 } 39260 // match: (EQ (InvertFlags cmp) yes no) 39261 // cond: 39262 // result: (EQ cmp yes no) 39263 for { 39264 v := b.Control 39265 if v.Op != OpARM64InvertFlags { 39266 break 39267 } 39268 cmp := v.Args[0] 39269 b.Kind = BlockARM64EQ 39270 b.SetControl(cmp) 39271 b.Aux = 
nil 39272 return true 39273 } 39274 case BlockARM64GE: 39275 // match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 39276 // cond: x.Uses == 1 39277 // result: (GE (TSTWconst [c] y) yes no) 39278 for { 39279 v := b.Control 39280 if v.Op != OpARM64CMPWconst { 39281 break 39282 } 39283 if v.AuxInt != 0 { 39284 break 39285 } 39286 x := v.Args[0] 39287 if x.Op != OpARM64ANDconst { 39288 break 39289 } 39290 c := x.AuxInt 39291 y := x.Args[0] 39292 if !(x.Uses == 1) { 39293 break 39294 } 39295 b.Kind = BlockARM64GE 39296 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 39297 v0.AuxInt = c 39298 v0.AddArg(y) 39299 b.SetControl(v0) 39300 b.Aux = nil 39301 return true 39302 } 39303 // match: (GE (CMPconst [0] z:(AND x y)) yes no) 39304 // cond: z.Uses == 1 39305 // result: (GE (TST x y) yes no) 39306 for { 39307 v := b.Control 39308 if v.Op != OpARM64CMPconst { 39309 break 39310 } 39311 if v.AuxInt != 0 { 39312 break 39313 } 39314 z := v.Args[0] 39315 if z.Op != OpARM64AND { 39316 break 39317 } 39318 _ = z.Args[1] 39319 x := z.Args[0] 39320 y := z.Args[1] 39321 if !(z.Uses == 1) { 39322 break 39323 } 39324 b.Kind = BlockARM64GE 39325 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 39326 v0.AddArg(x) 39327 v0.AddArg(y) 39328 b.SetControl(v0) 39329 b.Aux = nil 39330 return true 39331 } 39332 // match: (GE (CMPWconst [0] z:(AND x y)) yes no) 39333 // cond: z.Uses == 1 39334 // result: (GE (TSTW x y) yes no) 39335 for { 39336 v := b.Control 39337 if v.Op != OpARM64CMPWconst { 39338 break 39339 } 39340 if v.AuxInt != 0 { 39341 break 39342 } 39343 z := v.Args[0] 39344 if z.Op != OpARM64AND { 39345 break 39346 } 39347 _ = z.Args[1] 39348 x := z.Args[0] 39349 y := z.Args[1] 39350 if !(z.Uses == 1) { 39351 break 39352 } 39353 b.Kind = BlockARM64GE 39354 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 39355 v0.AddArg(x) 39356 v0.AddArg(y) 39357 b.SetControl(v0) 39358 b.Aux = nil 39359 return true 39360 } 39361 // match: (GE (CMPconst [0] x:(ANDconst [c] y)) 
yes no) 39362 // cond: x.Uses == 1 39363 // result: (GE (TSTconst [c] y) yes no) 39364 for { 39365 v := b.Control 39366 if v.Op != OpARM64CMPconst { 39367 break 39368 } 39369 if v.AuxInt != 0 { 39370 break 39371 } 39372 x := v.Args[0] 39373 if x.Op != OpARM64ANDconst { 39374 break 39375 } 39376 c := x.AuxInt 39377 y := x.Args[0] 39378 if !(x.Uses == 1) { 39379 break 39380 } 39381 b.Kind = BlockARM64GE 39382 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 39383 v0.AuxInt = c 39384 v0.AddArg(y) 39385 b.SetControl(v0) 39386 b.Aux = nil 39387 return true 39388 } 39389 // match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no) 39390 // cond: x.Uses == 1 39391 // result: (GE (CMNconst [c] y) yes no) 39392 for { 39393 v := b.Control 39394 if v.Op != OpARM64CMPconst { 39395 break 39396 } 39397 if v.AuxInt != 0 { 39398 break 39399 } 39400 x := v.Args[0] 39401 if x.Op != OpARM64ADDconst { 39402 break 39403 } 39404 c := x.AuxInt 39405 y := x.Args[0] 39406 if !(x.Uses == 1) { 39407 break 39408 } 39409 b.Kind = BlockARM64GE 39410 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 39411 v0.AuxInt = c 39412 v0.AddArg(y) 39413 b.SetControl(v0) 39414 b.Aux = nil 39415 return true 39416 } 39417 // match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 39418 // cond: x.Uses == 1 39419 // result: (GE (CMNWconst [c] y) yes no) 39420 for { 39421 v := b.Control 39422 if v.Op != OpARM64CMPWconst { 39423 break 39424 } 39425 if v.AuxInt != 0 { 39426 break 39427 } 39428 x := v.Args[0] 39429 if x.Op != OpARM64ADDconst { 39430 break 39431 } 39432 c := x.AuxInt 39433 y := x.Args[0] 39434 if !(x.Uses == 1) { 39435 break 39436 } 39437 b.Kind = BlockARM64GE 39438 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 39439 v0.AuxInt = c 39440 v0.AddArg(y) 39441 b.SetControl(v0) 39442 b.Aux = nil 39443 return true 39444 } 39445 // match: (GE (CMPconst [0] z:(ADD x y)) yes no) 39446 // cond: z.Uses == 1 39447 // result: (GE (CMN x y) yes no) 39448 for { 39449 v := b.Control 39450 
if v.Op != OpARM64CMPconst { 39451 break 39452 } 39453 if v.AuxInt != 0 { 39454 break 39455 } 39456 z := v.Args[0] 39457 if z.Op != OpARM64ADD { 39458 break 39459 } 39460 _ = z.Args[1] 39461 x := z.Args[0] 39462 y := z.Args[1] 39463 if !(z.Uses == 1) { 39464 break 39465 } 39466 b.Kind = BlockARM64GE 39467 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39468 v0.AddArg(x) 39469 v0.AddArg(y) 39470 b.SetControl(v0) 39471 b.Aux = nil 39472 return true 39473 } 39474 // match: (GE (CMPWconst [0] z:(ADD x y)) yes no) 39475 // cond: z.Uses == 1 39476 // result: (GE (CMNW x y) yes no) 39477 for { 39478 v := b.Control 39479 if v.Op != OpARM64CMPWconst { 39480 break 39481 } 39482 if v.AuxInt != 0 { 39483 break 39484 } 39485 z := v.Args[0] 39486 if z.Op != OpARM64ADD { 39487 break 39488 } 39489 _ = z.Args[1] 39490 x := z.Args[0] 39491 y := z.Args[1] 39492 if !(z.Uses == 1) { 39493 break 39494 } 39495 b.Kind = BlockARM64GE 39496 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39497 v0.AddArg(x) 39498 v0.AddArg(y) 39499 b.SetControl(v0) 39500 b.Aux = nil 39501 return true 39502 } 39503 // match: (GE (CMP x z:(NEG y)) yes no) 39504 // cond: z.Uses == 1 39505 // result: (GE (CMN x y) yes no) 39506 for { 39507 v := b.Control 39508 if v.Op != OpARM64CMP { 39509 break 39510 } 39511 _ = v.Args[1] 39512 x := v.Args[0] 39513 z := v.Args[1] 39514 if z.Op != OpARM64NEG { 39515 break 39516 } 39517 y := z.Args[0] 39518 if !(z.Uses == 1) { 39519 break 39520 } 39521 b.Kind = BlockARM64GE 39522 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39523 v0.AddArg(x) 39524 v0.AddArg(y) 39525 b.SetControl(v0) 39526 b.Aux = nil 39527 return true 39528 } 39529 // match: (GE (CMPW x z:(NEG y)) yes no) 39530 // cond: z.Uses == 1 39531 // result: (GE (CMNW x y) yes no) 39532 for { 39533 v := b.Control 39534 if v.Op != OpARM64CMPW { 39535 break 39536 } 39537 _ = v.Args[1] 39538 x := v.Args[0] 39539 z := v.Args[1] 39540 if z.Op != OpARM64NEG { 39541 break 39542 } 39543 y := z.Args[0] 39544 
if !(z.Uses == 1) { 39545 break 39546 } 39547 b.Kind = BlockARM64GE 39548 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39549 v0.AddArg(x) 39550 v0.AddArg(y) 39551 b.SetControl(v0) 39552 b.Aux = nil 39553 return true 39554 } 39555 // match: (GE (CMPconst [0] z:(MADD a x y)) yes no) 39556 // cond: z.Uses==1 39557 // result: (GE (CMN a (MUL <x.Type> x y)) yes no) 39558 for { 39559 v := b.Control 39560 if v.Op != OpARM64CMPconst { 39561 break 39562 } 39563 if v.AuxInt != 0 { 39564 break 39565 } 39566 z := v.Args[0] 39567 if z.Op != OpARM64MADD { 39568 break 39569 } 39570 _ = z.Args[2] 39571 a := z.Args[0] 39572 x := z.Args[1] 39573 y := z.Args[2] 39574 if !(z.Uses == 1) { 39575 break 39576 } 39577 b.Kind = BlockARM64GE 39578 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39579 v0.AddArg(a) 39580 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39581 v1.AddArg(x) 39582 v1.AddArg(y) 39583 v0.AddArg(v1) 39584 b.SetControl(v0) 39585 b.Aux = nil 39586 return true 39587 } 39588 // match: (GE (CMPconst [0] z:(MSUB a x y)) yes no) 39589 // cond: z.Uses==1 39590 // result: (GE (CMP a (MUL <x.Type> x y)) yes no) 39591 for { 39592 v := b.Control 39593 if v.Op != OpARM64CMPconst { 39594 break 39595 } 39596 if v.AuxInt != 0 { 39597 break 39598 } 39599 z := v.Args[0] 39600 if z.Op != OpARM64MSUB { 39601 break 39602 } 39603 _ = z.Args[2] 39604 a := z.Args[0] 39605 x := z.Args[1] 39606 y := z.Args[2] 39607 if !(z.Uses == 1) { 39608 break 39609 } 39610 b.Kind = BlockARM64GE 39611 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 39612 v0.AddArg(a) 39613 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39614 v1.AddArg(x) 39615 v1.AddArg(y) 39616 v0.AddArg(v1) 39617 b.SetControl(v0) 39618 b.Aux = nil 39619 return true 39620 } 39621 // match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no) 39622 // cond: z.Uses==1 39623 // result: (GE (CMNW a (MULW <x.Type> x y)) yes no) 39624 for { 39625 v := b.Control 39626 if v.Op != OpARM64CMPWconst { 39627 break 39628 } 39629 if v.AuxInt != 
0 { 39630 break 39631 } 39632 z := v.Args[0] 39633 if z.Op != OpARM64MADDW { 39634 break 39635 } 39636 _ = z.Args[2] 39637 a := z.Args[0] 39638 x := z.Args[1] 39639 y := z.Args[2] 39640 if !(z.Uses == 1) { 39641 break 39642 } 39643 b.Kind = BlockARM64GE 39644 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39645 v0.AddArg(a) 39646 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39647 v1.AddArg(x) 39648 v1.AddArg(y) 39649 v0.AddArg(v1) 39650 b.SetControl(v0) 39651 b.Aux = nil 39652 return true 39653 } 39654 // match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no) 39655 // cond: z.Uses==1 39656 // result: (GE (CMPW a (MULW <x.Type> x y)) yes no) 39657 for { 39658 v := b.Control 39659 if v.Op != OpARM64CMPWconst { 39660 break 39661 } 39662 if v.AuxInt != 0 { 39663 break 39664 } 39665 z := v.Args[0] 39666 if z.Op != OpARM64MSUBW { 39667 break 39668 } 39669 _ = z.Args[2] 39670 a := z.Args[0] 39671 x := z.Args[1] 39672 y := z.Args[2] 39673 if !(z.Uses == 1) { 39674 break 39675 } 39676 b.Kind = BlockARM64GE 39677 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 39678 v0.AddArg(a) 39679 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39680 v1.AddArg(x) 39681 v1.AddArg(y) 39682 v0.AddArg(v1) 39683 b.SetControl(v0) 39684 b.Aux = nil 39685 return true 39686 } 39687 // match: (GE (CMPWconst [0] x) yes no) 39688 // cond: 39689 // result: (TBZ {int64(31)} x yes no) 39690 for { 39691 v := b.Control 39692 if v.Op != OpARM64CMPWconst { 39693 break 39694 } 39695 if v.AuxInt != 0 { 39696 break 39697 } 39698 x := v.Args[0] 39699 b.Kind = BlockARM64TBZ 39700 b.SetControl(x) 39701 b.Aux = int64(31) 39702 return true 39703 } 39704 // match: (GE (CMPconst [0] x) yes no) 39705 // cond: 39706 // result: (TBZ {int64(63)} x yes no) 39707 for { 39708 v := b.Control 39709 if v.Op != OpARM64CMPconst { 39710 break 39711 } 39712 if v.AuxInt != 0 { 39713 break 39714 } 39715 x := v.Args[0] 39716 b.Kind = BlockARM64TBZ 39717 b.SetControl(x) 39718 b.Aux = int64(63) 39719 return true 39720 } 
39721 // match: (GE (FlagEQ) yes no) 39722 // cond: 39723 // result: (First nil yes no) 39724 for { 39725 v := b.Control 39726 if v.Op != OpARM64FlagEQ { 39727 break 39728 } 39729 b.Kind = BlockFirst 39730 b.SetControl(nil) 39731 b.Aux = nil 39732 return true 39733 } 39734 // match: (GE (FlagLT_ULT) yes no) 39735 // cond: 39736 // result: (First nil no yes) 39737 for { 39738 v := b.Control 39739 if v.Op != OpARM64FlagLT_ULT { 39740 break 39741 } 39742 b.Kind = BlockFirst 39743 b.SetControl(nil) 39744 b.Aux = nil 39745 b.swapSuccessors() 39746 return true 39747 } 39748 // match: (GE (FlagLT_UGT) yes no) 39749 // cond: 39750 // result: (First nil no yes) 39751 for { 39752 v := b.Control 39753 if v.Op != OpARM64FlagLT_UGT { 39754 break 39755 } 39756 b.Kind = BlockFirst 39757 b.SetControl(nil) 39758 b.Aux = nil 39759 b.swapSuccessors() 39760 return true 39761 } 39762 // match: (GE (FlagGT_ULT) yes no) 39763 // cond: 39764 // result: (First nil yes no) 39765 for { 39766 v := b.Control 39767 if v.Op != OpARM64FlagGT_ULT { 39768 break 39769 } 39770 b.Kind = BlockFirst 39771 b.SetControl(nil) 39772 b.Aux = nil 39773 return true 39774 } 39775 // match: (GE (FlagGT_UGT) yes no) 39776 // cond: 39777 // result: (First nil yes no) 39778 for { 39779 v := b.Control 39780 if v.Op != OpARM64FlagGT_UGT { 39781 break 39782 } 39783 b.Kind = BlockFirst 39784 b.SetControl(nil) 39785 b.Aux = nil 39786 return true 39787 } 39788 // match: (GE (InvertFlags cmp) yes no) 39789 // cond: 39790 // result: (LE cmp yes no) 39791 for { 39792 v := b.Control 39793 if v.Op != OpARM64InvertFlags { 39794 break 39795 } 39796 cmp := v.Args[0] 39797 b.Kind = BlockARM64LE 39798 b.SetControl(cmp) 39799 b.Aux = nil 39800 return true 39801 } 39802 case BlockARM64GT: 39803 // match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 39804 // cond: x.Uses == 1 39805 // result: (GT (TSTWconst [c] y) yes no) 39806 for { 39807 v := b.Control 39808 if v.Op != OpARM64CMPWconst { 39809 break 39810 } 39811 if v.AuxInt != 0 
{ 39812 break 39813 } 39814 x := v.Args[0] 39815 if x.Op != OpARM64ANDconst { 39816 break 39817 } 39818 c := x.AuxInt 39819 y := x.Args[0] 39820 if !(x.Uses == 1) { 39821 break 39822 } 39823 b.Kind = BlockARM64GT 39824 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 39825 v0.AuxInt = c 39826 v0.AddArg(y) 39827 b.SetControl(v0) 39828 b.Aux = nil 39829 return true 39830 } 39831 // match: (GT (CMPconst [0] z:(AND x y)) yes no) 39832 // cond: z.Uses == 1 39833 // result: (GT (TST x y) yes no) 39834 for { 39835 v := b.Control 39836 if v.Op != OpARM64CMPconst { 39837 break 39838 } 39839 if v.AuxInt != 0 { 39840 break 39841 } 39842 z := v.Args[0] 39843 if z.Op != OpARM64AND { 39844 break 39845 } 39846 _ = z.Args[1] 39847 x := z.Args[0] 39848 y := z.Args[1] 39849 if !(z.Uses == 1) { 39850 break 39851 } 39852 b.Kind = BlockARM64GT 39853 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 39854 v0.AddArg(x) 39855 v0.AddArg(y) 39856 b.SetControl(v0) 39857 b.Aux = nil 39858 return true 39859 } 39860 // match: (GT (CMPWconst [0] z:(AND x y)) yes no) 39861 // cond: z.Uses == 1 39862 // result: (GT (TSTW x y) yes no) 39863 for { 39864 v := b.Control 39865 if v.Op != OpARM64CMPWconst { 39866 break 39867 } 39868 if v.AuxInt != 0 { 39869 break 39870 } 39871 z := v.Args[0] 39872 if z.Op != OpARM64AND { 39873 break 39874 } 39875 _ = z.Args[1] 39876 x := z.Args[0] 39877 y := z.Args[1] 39878 if !(z.Uses == 1) { 39879 break 39880 } 39881 b.Kind = BlockARM64GT 39882 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 39883 v0.AddArg(x) 39884 v0.AddArg(y) 39885 b.SetControl(v0) 39886 b.Aux = nil 39887 return true 39888 } 39889 // match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no) 39890 // cond: x.Uses == 1 39891 // result: (GT (TSTconst [c] y) yes no) 39892 for { 39893 v := b.Control 39894 if v.Op != OpARM64CMPconst { 39895 break 39896 } 39897 if v.AuxInt != 0 { 39898 break 39899 } 39900 x := v.Args[0] 39901 if x.Op != OpARM64ANDconst { 39902 break 39903 } 39904 c := 
x.AuxInt 39905 y := x.Args[0] 39906 if !(x.Uses == 1) { 39907 break 39908 } 39909 b.Kind = BlockARM64GT 39910 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 39911 v0.AuxInt = c 39912 v0.AddArg(y) 39913 b.SetControl(v0) 39914 b.Aux = nil 39915 return true 39916 } 39917 // match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no) 39918 // cond: x.Uses == 1 39919 // result: (GT (CMNconst [c] y) yes no) 39920 for { 39921 v := b.Control 39922 if v.Op != OpARM64CMPconst { 39923 break 39924 } 39925 if v.AuxInt != 0 { 39926 break 39927 } 39928 x := v.Args[0] 39929 if x.Op != OpARM64ADDconst { 39930 break 39931 } 39932 c := x.AuxInt 39933 y := x.Args[0] 39934 if !(x.Uses == 1) { 39935 break 39936 } 39937 b.Kind = BlockARM64GT 39938 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 39939 v0.AuxInt = c 39940 v0.AddArg(y) 39941 b.SetControl(v0) 39942 b.Aux = nil 39943 return true 39944 } 39945 // match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 39946 // cond: x.Uses == 1 39947 // result: (GT (CMNWconst [c] y) yes no) 39948 for { 39949 v := b.Control 39950 if v.Op != OpARM64CMPWconst { 39951 break 39952 } 39953 if v.AuxInt != 0 { 39954 break 39955 } 39956 x := v.Args[0] 39957 if x.Op != OpARM64ADDconst { 39958 break 39959 } 39960 c := x.AuxInt 39961 y := x.Args[0] 39962 if !(x.Uses == 1) { 39963 break 39964 } 39965 b.Kind = BlockARM64GT 39966 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 39967 v0.AuxInt = c 39968 v0.AddArg(y) 39969 b.SetControl(v0) 39970 b.Aux = nil 39971 return true 39972 } 39973 // match: (GT (CMPconst [0] z:(ADD x y)) yes no) 39974 // cond: z.Uses == 1 39975 // result: (GT (CMN x y) yes no) 39976 for { 39977 v := b.Control 39978 if v.Op != OpARM64CMPconst { 39979 break 39980 } 39981 if v.AuxInt != 0 { 39982 break 39983 } 39984 z := v.Args[0] 39985 if z.Op != OpARM64ADD { 39986 break 39987 } 39988 _ = z.Args[1] 39989 x := z.Args[0] 39990 y := z.Args[1] 39991 if !(z.Uses == 1) { 39992 break 39993 } 39994 b.Kind = BlockARM64GT 
39995 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39996 v0.AddArg(x) 39997 v0.AddArg(y) 39998 b.SetControl(v0) 39999 b.Aux = nil 40000 return true 40001 } 40002 // match: (GT (CMPWconst [0] z:(ADD x y)) yes no) 40003 // cond: z.Uses == 1 40004 // result: (GT (CMNW x y) yes no) 40005 for { 40006 v := b.Control 40007 if v.Op != OpARM64CMPWconst { 40008 break 40009 } 40010 if v.AuxInt != 0 { 40011 break 40012 } 40013 z := v.Args[0] 40014 if z.Op != OpARM64ADD { 40015 break 40016 } 40017 _ = z.Args[1] 40018 x := z.Args[0] 40019 y := z.Args[1] 40020 if !(z.Uses == 1) { 40021 break 40022 } 40023 b.Kind = BlockARM64GT 40024 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40025 v0.AddArg(x) 40026 v0.AddArg(y) 40027 b.SetControl(v0) 40028 b.Aux = nil 40029 return true 40030 } 40031 // match: (GT (CMP x z:(NEG y)) yes no) 40032 // cond: z.Uses == 1 40033 // result: (GT (CMN x y) yes no) 40034 for { 40035 v := b.Control 40036 if v.Op != OpARM64CMP { 40037 break 40038 } 40039 _ = v.Args[1] 40040 x := v.Args[0] 40041 z := v.Args[1] 40042 if z.Op != OpARM64NEG { 40043 break 40044 } 40045 y := z.Args[0] 40046 if !(z.Uses == 1) { 40047 break 40048 } 40049 b.Kind = BlockARM64GT 40050 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40051 v0.AddArg(x) 40052 v0.AddArg(y) 40053 b.SetControl(v0) 40054 b.Aux = nil 40055 return true 40056 } 40057 // match: (GT (CMPW x z:(NEG y)) yes no) 40058 // cond: z.Uses == 1 40059 // result: (GT (CMNW x y) yes no) 40060 for { 40061 v := b.Control 40062 if v.Op != OpARM64CMPW { 40063 break 40064 } 40065 _ = v.Args[1] 40066 x := v.Args[0] 40067 z := v.Args[1] 40068 if z.Op != OpARM64NEG { 40069 break 40070 } 40071 y := z.Args[0] 40072 if !(z.Uses == 1) { 40073 break 40074 } 40075 b.Kind = BlockARM64GT 40076 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40077 v0.AddArg(x) 40078 v0.AddArg(y) 40079 b.SetControl(v0) 40080 b.Aux = nil 40081 return true 40082 } 40083 // match: (GT (CMPconst [0] z:(MADD a x y)) yes no) 40084 // 
cond: z.Uses==1 40085 // result: (GT (CMN a (MUL <x.Type> x y)) yes no) 40086 for { 40087 v := b.Control 40088 if v.Op != OpARM64CMPconst { 40089 break 40090 } 40091 if v.AuxInt != 0 { 40092 break 40093 } 40094 z := v.Args[0] 40095 if z.Op != OpARM64MADD { 40096 break 40097 } 40098 _ = z.Args[2] 40099 a := z.Args[0] 40100 x := z.Args[1] 40101 y := z.Args[2] 40102 if !(z.Uses == 1) { 40103 break 40104 } 40105 b.Kind = BlockARM64GT 40106 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40107 v0.AddArg(a) 40108 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40109 v1.AddArg(x) 40110 v1.AddArg(y) 40111 v0.AddArg(v1) 40112 b.SetControl(v0) 40113 b.Aux = nil 40114 return true 40115 } 40116 // match: (GT (CMPconst [0] z:(MSUB a x y)) yes no) 40117 // cond: z.Uses==1 40118 // result: (GT (CMP a (MUL <x.Type> x y)) yes no) 40119 for { 40120 v := b.Control 40121 if v.Op != OpARM64CMPconst { 40122 break 40123 } 40124 if v.AuxInt != 0 { 40125 break 40126 } 40127 z := v.Args[0] 40128 if z.Op != OpARM64MSUB { 40129 break 40130 } 40131 _ = z.Args[2] 40132 a := z.Args[0] 40133 x := z.Args[1] 40134 y := z.Args[2] 40135 if !(z.Uses == 1) { 40136 break 40137 } 40138 b.Kind = BlockARM64GT 40139 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 40140 v0.AddArg(a) 40141 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40142 v1.AddArg(x) 40143 v1.AddArg(y) 40144 v0.AddArg(v1) 40145 b.SetControl(v0) 40146 b.Aux = nil 40147 return true 40148 } 40149 // match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no) 40150 // cond: z.Uses==1 40151 // result: (GT (CMNW a (MULW <x.Type> x y)) yes no) 40152 for { 40153 v := b.Control 40154 if v.Op != OpARM64CMPWconst { 40155 break 40156 } 40157 if v.AuxInt != 0 { 40158 break 40159 } 40160 z := v.Args[0] 40161 if z.Op != OpARM64MADDW { 40162 break 40163 } 40164 _ = z.Args[2] 40165 a := z.Args[0] 40166 x := z.Args[1] 40167 y := z.Args[2] 40168 if !(z.Uses == 1) { 40169 break 40170 } 40171 b.Kind = BlockARM64GT 40172 v0 := b.NewValue0(v.Pos, OpARM64CMNW, 
types.TypeFlags) 40173 v0.AddArg(a) 40174 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40175 v1.AddArg(x) 40176 v1.AddArg(y) 40177 v0.AddArg(v1) 40178 b.SetControl(v0) 40179 b.Aux = nil 40180 return true 40181 } 40182 // match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no) 40183 // cond: z.Uses==1 40184 // result: (GT (CMPW a (MULW <x.Type> x y)) yes no) 40185 for { 40186 v := b.Control 40187 if v.Op != OpARM64CMPWconst { 40188 break 40189 } 40190 if v.AuxInt != 0 { 40191 break 40192 } 40193 z := v.Args[0] 40194 if z.Op != OpARM64MSUBW { 40195 break 40196 } 40197 _ = z.Args[2] 40198 a := z.Args[0] 40199 x := z.Args[1] 40200 y := z.Args[2] 40201 if !(z.Uses == 1) { 40202 break 40203 } 40204 b.Kind = BlockARM64GT 40205 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 40206 v0.AddArg(a) 40207 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40208 v1.AddArg(x) 40209 v1.AddArg(y) 40210 v0.AddArg(v1) 40211 b.SetControl(v0) 40212 b.Aux = nil 40213 return true 40214 } 40215 // match: (GT (FlagEQ) yes no) 40216 // cond: 40217 // result: (First nil no yes) 40218 for { 40219 v := b.Control 40220 if v.Op != OpARM64FlagEQ { 40221 break 40222 } 40223 b.Kind = BlockFirst 40224 b.SetControl(nil) 40225 b.Aux = nil 40226 b.swapSuccessors() 40227 return true 40228 } 40229 // match: (GT (FlagLT_ULT) yes no) 40230 // cond: 40231 // result: (First nil no yes) 40232 for { 40233 v := b.Control 40234 if v.Op != OpARM64FlagLT_ULT { 40235 break 40236 } 40237 b.Kind = BlockFirst 40238 b.SetControl(nil) 40239 b.Aux = nil 40240 b.swapSuccessors() 40241 return true 40242 } 40243 // match: (GT (FlagLT_UGT) yes no) 40244 // cond: 40245 // result: (First nil no yes) 40246 for { 40247 v := b.Control 40248 if v.Op != OpARM64FlagLT_UGT { 40249 break 40250 } 40251 b.Kind = BlockFirst 40252 b.SetControl(nil) 40253 b.Aux = nil 40254 b.swapSuccessors() 40255 return true 40256 } 40257 // match: (GT (FlagGT_ULT) yes no) 40258 // cond: 40259 // result: (First nil yes no) 40260 for { 40261 v := b.Control 
40262 if v.Op != OpARM64FlagGT_ULT { 40263 break 40264 } 40265 b.Kind = BlockFirst 40266 b.SetControl(nil) 40267 b.Aux = nil 40268 return true 40269 } 40270 // match: (GT (FlagGT_UGT) yes no) 40271 // cond: 40272 // result: (First nil yes no) 40273 for { 40274 v := b.Control 40275 if v.Op != OpARM64FlagGT_UGT { 40276 break 40277 } 40278 b.Kind = BlockFirst 40279 b.SetControl(nil) 40280 b.Aux = nil 40281 return true 40282 } 40283 // match: (GT (InvertFlags cmp) yes no) 40284 // cond: 40285 // result: (LT cmp yes no) 40286 for { 40287 v := b.Control 40288 if v.Op != OpARM64InvertFlags { 40289 break 40290 } 40291 cmp := v.Args[0] 40292 b.Kind = BlockARM64LT 40293 b.SetControl(cmp) 40294 b.Aux = nil 40295 return true 40296 } 40297 case BlockIf: 40298 // match: (If (Equal cc) yes no) 40299 // cond: 40300 // result: (EQ cc yes no) 40301 for { 40302 v := b.Control 40303 if v.Op != OpARM64Equal { 40304 break 40305 } 40306 cc := v.Args[0] 40307 b.Kind = BlockARM64EQ 40308 b.SetControl(cc) 40309 b.Aux = nil 40310 return true 40311 } 40312 // match: (If (NotEqual cc) yes no) 40313 // cond: 40314 // result: (NE cc yes no) 40315 for { 40316 v := b.Control 40317 if v.Op != OpARM64NotEqual { 40318 break 40319 } 40320 cc := v.Args[0] 40321 b.Kind = BlockARM64NE 40322 b.SetControl(cc) 40323 b.Aux = nil 40324 return true 40325 } 40326 // match: (If (LessThan cc) yes no) 40327 // cond: 40328 // result: (LT cc yes no) 40329 for { 40330 v := b.Control 40331 if v.Op != OpARM64LessThan { 40332 break 40333 } 40334 cc := v.Args[0] 40335 b.Kind = BlockARM64LT 40336 b.SetControl(cc) 40337 b.Aux = nil 40338 return true 40339 } 40340 // match: (If (LessThanU cc) yes no) 40341 // cond: 40342 // result: (ULT cc yes no) 40343 for { 40344 v := b.Control 40345 if v.Op != OpARM64LessThanU { 40346 break 40347 } 40348 cc := v.Args[0] 40349 b.Kind = BlockARM64ULT 40350 b.SetControl(cc) 40351 b.Aux = nil 40352 return true 40353 } 40354 // match: (If (LessEqual cc) yes no) 40355 // cond: 40356 // result: 
(LE cc yes no) 40357 for { 40358 v := b.Control 40359 if v.Op != OpARM64LessEqual { 40360 break 40361 } 40362 cc := v.Args[0] 40363 b.Kind = BlockARM64LE 40364 b.SetControl(cc) 40365 b.Aux = nil 40366 return true 40367 } 40368 // match: (If (LessEqualU cc) yes no) 40369 // cond: 40370 // result: (ULE cc yes no) 40371 for { 40372 v := b.Control 40373 if v.Op != OpARM64LessEqualU { 40374 break 40375 } 40376 cc := v.Args[0] 40377 b.Kind = BlockARM64ULE 40378 b.SetControl(cc) 40379 b.Aux = nil 40380 return true 40381 } 40382 // match: (If (GreaterThan cc) yes no) 40383 // cond: 40384 // result: (GT cc yes no) 40385 for { 40386 v := b.Control 40387 if v.Op != OpARM64GreaterThan { 40388 break 40389 } 40390 cc := v.Args[0] 40391 b.Kind = BlockARM64GT 40392 b.SetControl(cc) 40393 b.Aux = nil 40394 return true 40395 } 40396 // match: (If (GreaterThanU cc) yes no) 40397 // cond: 40398 // result: (UGT cc yes no) 40399 for { 40400 v := b.Control 40401 if v.Op != OpARM64GreaterThanU { 40402 break 40403 } 40404 cc := v.Args[0] 40405 b.Kind = BlockARM64UGT 40406 b.SetControl(cc) 40407 b.Aux = nil 40408 return true 40409 } 40410 // match: (If (GreaterEqual cc) yes no) 40411 // cond: 40412 // result: (GE cc yes no) 40413 for { 40414 v := b.Control 40415 if v.Op != OpARM64GreaterEqual { 40416 break 40417 } 40418 cc := v.Args[0] 40419 b.Kind = BlockARM64GE 40420 b.SetControl(cc) 40421 b.Aux = nil 40422 return true 40423 } 40424 // match: (If (GreaterEqualU cc) yes no) 40425 // cond: 40426 // result: (UGE cc yes no) 40427 for { 40428 v := b.Control 40429 if v.Op != OpARM64GreaterEqualU { 40430 break 40431 } 40432 cc := v.Args[0] 40433 b.Kind = BlockARM64UGE 40434 b.SetControl(cc) 40435 b.Aux = nil 40436 return true 40437 } 40438 // match: (If cond yes no) 40439 // cond: 40440 // result: (NZ cond yes no) 40441 for { 40442 v := b.Control 40443 _ = v 40444 cond := b.Control 40445 b.Kind = BlockARM64NZ 40446 b.SetControl(cond) 40447 b.Aux = nil 40448 return true 40449 } 40450 case 
BlockARM64LE: 40451 // match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 40452 // cond: x.Uses == 1 40453 // result: (LE (TSTWconst [c] y) yes no) 40454 for { 40455 v := b.Control 40456 if v.Op != OpARM64CMPWconst { 40457 break 40458 } 40459 if v.AuxInt != 0 { 40460 break 40461 } 40462 x := v.Args[0] 40463 if x.Op != OpARM64ANDconst { 40464 break 40465 } 40466 c := x.AuxInt 40467 y := x.Args[0] 40468 if !(x.Uses == 1) { 40469 break 40470 } 40471 b.Kind = BlockARM64LE 40472 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 40473 v0.AuxInt = c 40474 v0.AddArg(y) 40475 b.SetControl(v0) 40476 b.Aux = nil 40477 return true 40478 } 40479 // match: (LE (CMPconst [0] z:(AND x y)) yes no) 40480 // cond: z.Uses == 1 40481 // result: (LE (TST x y) yes no) 40482 for { 40483 v := b.Control 40484 if v.Op != OpARM64CMPconst { 40485 break 40486 } 40487 if v.AuxInt != 0 { 40488 break 40489 } 40490 z := v.Args[0] 40491 if z.Op != OpARM64AND { 40492 break 40493 } 40494 _ = z.Args[1] 40495 x := z.Args[0] 40496 y := z.Args[1] 40497 if !(z.Uses == 1) { 40498 break 40499 } 40500 b.Kind = BlockARM64LE 40501 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 40502 v0.AddArg(x) 40503 v0.AddArg(y) 40504 b.SetControl(v0) 40505 b.Aux = nil 40506 return true 40507 } 40508 // match: (LE (CMPWconst [0] z:(AND x y)) yes no) 40509 // cond: z.Uses == 1 40510 // result: (LE (TSTW x y) yes no) 40511 for { 40512 v := b.Control 40513 if v.Op != OpARM64CMPWconst { 40514 break 40515 } 40516 if v.AuxInt != 0 { 40517 break 40518 } 40519 z := v.Args[0] 40520 if z.Op != OpARM64AND { 40521 break 40522 } 40523 _ = z.Args[1] 40524 x := z.Args[0] 40525 y := z.Args[1] 40526 if !(z.Uses == 1) { 40527 break 40528 } 40529 b.Kind = BlockARM64LE 40530 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 40531 v0.AddArg(x) 40532 v0.AddArg(y) 40533 b.SetControl(v0) 40534 b.Aux = nil 40535 return true 40536 } 40537 // match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no) 40538 // cond: x.Uses == 1 40539 
// result: (LE (TSTconst [c] y) yes no) 40540 for { 40541 v := b.Control 40542 if v.Op != OpARM64CMPconst { 40543 break 40544 } 40545 if v.AuxInt != 0 { 40546 break 40547 } 40548 x := v.Args[0] 40549 if x.Op != OpARM64ANDconst { 40550 break 40551 } 40552 c := x.AuxInt 40553 y := x.Args[0] 40554 if !(x.Uses == 1) { 40555 break 40556 } 40557 b.Kind = BlockARM64LE 40558 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 40559 v0.AuxInt = c 40560 v0.AddArg(y) 40561 b.SetControl(v0) 40562 b.Aux = nil 40563 return true 40564 } 40565 // match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no) 40566 // cond: x.Uses == 1 40567 // result: (LE (CMNconst [c] y) yes no) 40568 for { 40569 v := b.Control 40570 if v.Op != OpARM64CMPconst { 40571 break 40572 } 40573 if v.AuxInt != 0 { 40574 break 40575 } 40576 x := v.Args[0] 40577 if x.Op != OpARM64ADDconst { 40578 break 40579 } 40580 c := x.AuxInt 40581 y := x.Args[0] 40582 if !(x.Uses == 1) { 40583 break 40584 } 40585 b.Kind = BlockARM64LE 40586 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 40587 v0.AuxInt = c 40588 v0.AddArg(y) 40589 b.SetControl(v0) 40590 b.Aux = nil 40591 return true 40592 } 40593 // match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 40594 // cond: x.Uses == 1 40595 // result: (LE (CMNWconst [c] y) yes no) 40596 for { 40597 v := b.Control 40598 if v.Op != OpARM64CMPWconst { 40599 break 40600 } 40601 if v.AuxInt != 0 { 40602 break 40603 } 40604 x := v.Args[0] 40605 if x.Op != OpARM64ADDconst { 40606 break 40607 } 40608 c := x.AuxInt 40609 y := x.Args[0] 40610 if !(x.Uses == 1) { 40611 break 40612 } 40613 b.Kind = BlockARM64LE 40614 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 40615 v0.AuxInt = c 40616 v0.AddArg(y) 40617 b.SetControl(v0) 40618 b.Aux = nil 40619 return true 40620 } 40621 // match: (LE (CMPconst [0] z:(ADD x y)) yes no) 40622 // cond: z.Uses == 1 40623 // result: (LE (CMN x y) yes no) 40624 for { 40625 v := b.Control 40626 if v.Op != OpARM64CMPconst { 40627 break 
40628 } 40629 if v.AuxInt != 0 { 40630 break 40631 } 40632 z := v.Args[0] 40633 if z.Op != OpARM64ADD { 40634 break 40635 } 40636 _ = z.Args[1] 40637 x := z.Args[0] 40638 y := z.Args[1] 40639 if !(z.Uses == 1) { 40640 break 40641 } 40642 b.Kind = BlockARM64LE 40643 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40644 v0.AddArg(x) 40645 v0.AddArg(y) 40646 b.SetControl(v0) 40647 b.Aux = nil 40648 return true 40649 } 40650 // match: (LE (CMPWconst [0] z:(ADD x y)) yes no) 40651 // cond: z.Uses == 1 40652 // result: (LE (CMNW x y) yes no) 40653 for { 40654 v := b.Control 40655 if v.Op != OpARM64CMPWconst { 40656 break 40657 } 40658 if v.AuxInt != 0 { 40659 break 40660 } 40661 z := v.Args[0] 40662 if z.Op != OpARM64ADD { 40663 break 40664 } 40665 _ = z.Args[1] 40666 x := z.Args[0] 40667 y := z.Args[1] 40668 if !(z.Uses == 1) { 40669 break 40670 } 40671 b.Kind = BlockARM64LE 40672 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40673 v0.AddArg(x) 40674 v0.AddArg(y) 40675 b.SetControl(v0) 40676 b.Aux = nil 40677 return true 40678 } 40679 // match: (LE (CMP x z:(NEG y)) yes no) 40680 // cond: z.Uses == 1 40681 // result: (LE (CMN x y) yes no) 40682 for { 40683 v := b.Control 40684 if v.Op != OpARM64CMP { 40685 break 40686 } 40687 _ = v.Args[1] 40688 x := v.Args[0] 40689 z := v.Args[1] 40690 if z.Op != OpARM64NEG { 40691 break 40692 } 40693 y := z.Args[0] 40694 if !(z.Uses == 1) { 40695 break 40696 } 40697 b.Kind = BlockARM64LE 40698 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40699 v0.AddArg(x) 40700 v0.AddArg(y) 40701 b.SetControl(v0) 40702 b.Aux = nil 40703 return true 40704 } 40705 // match: (LE (CMPW x z:(NEG y)) yes no) 40706 // cond: z.Uses == 1 40707 // result: (LE (CMNW x y) yes no) 40708 for { 40709 v := b.Control 40710 if v.Op != OpARM64CMPW { 40711 break 40712 } 40713 _ = v.Args[1] 40714 x := v.Args[0] 40715 z := v.Args[1] 40716 if z.Op != OpARM64NEG { 40717 break 40718 } 40719 y := z.Args[0] 40720 if !(z.Uses == 1) { 40721 break 40722 } 
40723 b.Kind = BlockARM64LE 40724 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40725 v0.AddArg(x) 40726 v0.AddArg(y) 40727 b.SetControl(v0) 40728 b.Aux = nil 40729 return true 40730 } 40731 // match: (LE (CMPconst [0] z:(MADD a x y)) yes no) 40732 // cond: z.Uses==1 40733 // result: (LE (CMN a (MUL <x.Type> x y)) yes no) 40734 for { 40735 v := b.Control 40736 if v.Op != OpARM64CMPconst { 40737 break 40738 } 40739 if v.AuxInt != 0 { 40740 break 40741 } 40742 z := v.Args[0] 40743 if z.Op != OpARM64MADD { 40744 break 40745 } 40746 _ = z.Args[2] 40747 a := z.Args[0] 40748 x := z.Args[1] 40749 y := z.Args[2] 40750 if !(z.Uses == 1) { 40751 break 40752 } 40753 b.Kind = BlockARM64LE 40754 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40755 v0.AddArg(a) 40756 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40757 v1.AddArg(x) 40758 v1.AddArg(y) 40759 v0.AddArg(v1) 40760 b.SetControl(v0) 40761 b.Aux = nil 40762 return true 40763 } 40764 // match: (LE (CMPconst [0] z:(MSUB a x y)) yes no) 40765 // cond: z.Uses==1 40766 // result: (LE (CMP a (MUL <x.Type> x y)) yes no) 40767 for { 40768 v := b.Control 40769 if v.Op != OpARM64CMPconst { 40770 break 40771 } 40772 if v.AuxInt != 0 { 40773 break 40774 } 40775 z := v.Args[0] 40776 if z.Op != OpARM64MSUB { 40777 break 40778 } 40779 _ = z.Args[2] 40780 a := z.Args[0] 40781 x := z.Args[1] 40782 y := z.Args[2] 40783 if !(z.Uses == 1) { 40784 break 40785 } 40786 b.Kind = BlockARM64LE 40787 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 40788 v0.AddArg(a) 40789 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40790 v1.AddArg(x) 40791 v1.AddArg(y) 40792 v0.AddArg(v1) 40793 b.SetControl(v0) 40794 b.Aux = nil 40795 return true 40796 } 40797 // match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no) 40798 // cond: z.Uses==1 40799 // result: (LE (CMNW a (MULW <x.Type> x y)) yes no) 40800 for { 40801 v := b.Control 40802 if v.Op != OpARM64CMPWconst { 40803 break 40804 } 40805 if v.AuxInt != 0 { 40806 break 40807 } 40808 z := 
v.Args[0] 40809 if z.Op != OpARM64MADDW { 40810 break 40811 } 40812 _ = z.Args[2] 40813 a := z.Args[0] 40814 x := z.Args[1] 40815 y := z.Args[2] 40816 if !(z.Uses == 1) { 40817 break 40818 } 40819 b.Kind = BlockARM64LE 40820 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40821 v0.AddArg(a) 40822 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40823 v1.AddArg(x) 40824 v1.AddArg(y) 40825 v0.AddArg(v1) 40826 b.SetControl(v0) 40827 b.Aux = nil 40828 return true 40829 } 40830 // match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no) 40831 // cond: z.Uses==1 40832 // result: (LE (CMPW a (MULW <x.Type> x y)) yes no) 40833 for { 40834 v := b.Control 40835 if v.Op != OpARM64CMPWconst { 40836 break 40837 } 40838 if v.AuxInt != 0 { 40839 break 40840 } 40841 z := v.Args[0] 40842 if z.Op != OpARM64MSUBW { 40843 break 40844 } 40845 _ = z.Args[2] 40846 a := z.Args[0] 40847 x := z.Args[1] 40848 y := z.Args[2] 40849 if !(z.Uses == 1) { 40850 break 40851 } 40852 b.Kind = BlockARM64LE 40853 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 40854 v0.AddArg(a) 40855 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40856 v1.AddArg(x) 40857 v1.AddArg(y) 40858 v0.AddArg(v1) 40859 b.SetControl(v0) 40860 b.Aux = nil 40861 return true 40862 } 40863 // match: (LE (FlagEQ) yes no) 40864 // cond: 40865 // result: (First nil yes no) 40866 for { 40867 v := b.Control 40868 if v.Op != OpARM64FlagEQ { 40869 break 40870 } 40871 b.Kind = BlockFirst 40872 b.SetControl(nil) 40873 b.Aux = nil 40874 return true 40875 } 40876 // match: (LE (FlagLT_ULT) yes no) 40877 // cond: 40878 // result: (First nil yes no) 40879 for { 40880 v := b.Control 40881 if v.Op != OpARM64FlagLT_ULT { 40882 break 40883 } 40884 b.Kind = BlockFirst 40885 b.SetControl(nil) 40886 b.Aux = nil 40887 return true 40888 } 40889 // match: (LE (FlagLT_UGT) yes no) 40890 // cond: 40891 // result: (First nil yes no) 40892 for { 40893 v := b.Control 40894 if v.Op != OpARM64FlagLT_UGT { 40895 break 40896 } 40897 b.Kind = BlockFirst 40898 
b.SetControl(nil) 40899 b.Aux = nil 40900 return true 40901 } 40902 // match: (LE (FlagGT_ULT) yes no) 40903 // cond: 40904 // result: (First nil no yes) 40905 for { 40906 v := b.Control 40907 if v.Op != OpARM64FlagGT_ULT { 40908 break 40909 } 40910 b.Kind = BlockFirst 40911 b.SetControl(nil) 40912 b.Aux = nil 40913 b.swapSuccessors() 40914 return true 40915 } 40916 // match: (LE (FlagGT_UGT) yes no) 40917 // cond: 40918 // result: (First nil no yes) 40919 for { 40920 v := b.Control 40921 if v.Op != OpARM64FlagGT_UGT { 40922 break 40923 } 40924 b.Kind = BlockFirst 40925 b.SetControl(nil) 40926 b.Aux = nil 40927 b.swapSuccessors() 40928 return true 40929 } 40930 // match: (LE (InvertFlags cmp) yes no) 40931 // cond: 40932 // result: (GE cmp yes no) 40933 for { 40934 v := b.Control 40935 if v.Op != OpARM64InvertFlags { 40936 break 40937 } 40938 cmp := v.Args[0] 40939 b.Kind = BlockARM64GE 40940 b.SetControl(cmp) 40941 b.Aux = nil 40942 return true 40943 } 40944 case BlockARM64LT: 40945 // match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 40946 // cond: x.Uses == 1 40947 // result: (LT (TSTWconst [c] y) yes no) 40948 for { 40949 v := b.Control 40950 if v.Op != OpARM64CMPWconst { 40951 break 40952 } 40953 if v.AuxInt != 0 { 40954 break 40955 } 40956 x := v.Args[0] 40957 if x.Op != OpARM64ANDconst { 40958 break 40959 } 40960 c := x.AuxInt 40961 y := x.Args[0] 40962 if !(x.Uses == 1) { 40963 break 40964 } 40965 b.Kind = BlockARM64LT 40966 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 40967 v0.AuxInt = c 40968 v0.AddArg(y) 40969 b.SetControl(v0) 40970 b.Aux = nil 40971 return true 40972 } 40973 // match: (LT (CMPconst [0] z:(AND x y)) yes no) 40974 // cond: z.Uses == 1 40975 // result: (LT (TST x y) yes no) 40976 for { 40977 v := b.Control 40978 if v.Op != OpARM64CMPconst { 40979 break 40980 } 40981 if v.AuxInt != 0 { 40982 break 40983 } 40984 z := v.Args[0] 40985 if z.Op != OpARM64AND { 40986 break 40987 } 40988 _ = z.Args[1] 40989 x := z.Args[0] 40990 
y := z.Args[1] 40991 if !(z.Uses == 1) { 40992 break 40993 } 40994 b.Kind = BlockARM64LT 40995 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 40996 v0.AddArg(x) 40997 v0.AddArg(y) 40998 b.SetControl(v0) 40999 b.Aux = nil 41000 return true 41001 } 41002 // match: (LT (CMPWconst [0] z:(AND x y)) yes no) 41003 // cond: z.Uses == 1 41004 // result: (LT (TSTW x y) yes no) 41005 for { 41006 v := b.Control 41007 if v.Op != OpARM64CMPWconst { 41008 break 41009 } 41010 if v.AuxInt != 0 { 41011 break 41012 } 41013 z := v.Args[0] 41014 if z.Op != OpARM64AND { 41015 break 41016 } 41017 _ = z.Args[1] 41018 x := z.Args[0] 41019 y := z.Args[1] 41020 if !(z.Uses == 1) { 41021 break 41022 } 41023 b.Kind = BlockARM64LT 41024 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 41025 v0.AddArg(x) 41026 v0.AddArg(y) 41027 b.SetControl(v0) 41028 b.Aux = nil 41029 return true 41030 } 41031 // match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no) 41032 // cond: x.Uses == 1 41033 // result: (LT (TSTconst [c] y) yes no) 41034 for { 41035 v := b.Control 41036 if v.Op != OpARM64CMPconst { 41037 break 41038 } 41039 if v.AuxInt != 0 { 41040 break 41041 } 41042 x := v.Args[0] 41043 if x.Op != OpARM64ANDconst { 41044 break 41045 } 41046 c := x.AuxInt 41047 y := x.Args[0] 41048 if !(x.Uses == 1) { 41049 break 41050 } 41051 b.Kind = BlockARM64LT 41052 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 41053 v0.AuxInt = c 41054 v0.AddArg(y) 41055 b.SetControl(v0) 41056 b.Aux = nil 41057 return true 41058 } 41059 // match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no) 41060 // cond: x.Uses == 1 41061 // result: (LT (CMNconst [c] y) yes no) 41062 for { 41063 v := b.Control 41064 if v.Op != OpARM64CMPconst { 41065 break 41066 } 41067 if v.AuxInt != 0 { 41068 break 41069 } 41070 x := v.Args[0] 41071 if x.Op != OpARM64ADDconst { 41072 break 41073 } 41074 c := x.AuxInt 41075 y := x.Args[0] 41076 if !(x.Uses == 1) { 41077 break 41078 } 41079 b.Kind = BlockARM64LT 41080 v0 := 
b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 41081 v0.AuxInt = c 41082 v0.AddArg(y) 41083 b.SetControl(v0) 41084 b.Aux = nil 41085 return true 41086 } 41087 // match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 41088 // cond: x.Uses == 1 41089 // result: (LT (CMNWconst [c] y) yes no) 41090 for { 41091 v := b.Control 41092 if v.Op != OpARM64CMPWconst { 41093 break 41094 } 41095 if v.AuxInt != 0 { 41096 break 41097 } 41098 x := v.Args[0] 41099 if x.Op != OpARM64ADDconst { 41100 break 41101 } 41102 c := x.AuxInt 41103 y := x.Args[0] 41104 if !(x.Uses == 1) { 41105 break 41106 } 41107 b.Kind = BlockARM64LT 41108 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 41109 v0.AuxInt = c 41110 v0.AddArg(y) 41111 b.SetControl(v0) 41112 b.Aux = nil 41113 return true 41114 } 41115 // match: (LT (CMPconst [0] z:(ADD x y)) yes no) 41116 // cond: z.Uses == 1 41117 // result: (LT (CMN x y) yes no) 41118 for { 41119 v := b.Control 41120 if v.Op != OpARM64CMPconst { 41121 break 41122 } 41123 if v.AuxInt != 0 { 41124 break 41125 } 41126 z := v.Args[0] 41127 if z.Op != OpARM64ADD { 41128 break 41129 } 41130 _ = z.Args[1] 41131 x := z.Args[0] 41132 y := z.Args[1] 41133 if !(z.Uses == 1) { 41134 break 41135 } 41136 b.Kind = BlockARM64LT 41137 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41138 v0.AddArg(x) 41139 v0.AddArg(y) 41140 b.SetControl(v0) 41141 b.Aux = nil 41142 return true 41143 } 41144 // match: (LT (CMPWconst [0] z:(ADD x y)) yes no) 41145 // cond: z.Uses == 1 41146 // result: (LT (CMNW x y) yes no) 41147 for { 41148 v := b.Control 41149 if v.Op != OpARM64CMPWconst { 41150 break 41151 } 41152 if v.AuxInt != 0 { 41153 break 41154 } 41155 z := v.Args[0] 41156 if z.Op != OpARM64ADD { 41157 break 41158 } 41159 _ = z.Args[1] 41160 x := z.Args[0] 41161 y := z.Args[1] 41162 if !(z.Uses == 1) { 41163 break 41164 } 41165 b.Kind = BlockARM64LT 41166 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41167 v0.AddArg(x) 41168 v0.AddArg(y) 41169 
b.SetControl(v0) 41170 b.Aux = nil 41171 return true 41172 } 41173 // match: (LT (CMP x z:(NEG y)) yes no) 41174 // cond: z.Uses == 1 41175 // result: (LT (CMN x y) yes no) 41176 for { 41177 v := b.Control 41178 if v.Op != OpARM64CMP { 41179 break 41180 } 41181 _ = v.Args[1] 41182 x := v.Args[0] 41183 z := v.Args[1] 41184 if z.Op != OpARM64NEG { 41185 break 41186 } 41187 y := z.Args[0] 41188 if !(z.Uses == 1) { 41189 break 41190 } 41191 b.Kind = BlockARM64LT 41192 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41193 v0.AddArg(x) 41194 v0.AddArg(y) 41195 b.SetControl(v0) 41196 b.Aux = nil 41197 return true 41198 } 41199 // match: (LT (CMPW x z:(NEG y)) yes no) 41200 // cond: z.Uses == 1 41201 // result: (LT (CMNW x y) yes no) 41202 for { 41203 v := b.Control 41204 if v.Op != OpARM64CMPW { 41205 break 41206 } 41207 _ = v.Args[1] 41208 x := v.Args[0] 41209 z := v.Args[1] 41210 if z.Op != OpARM64NEG { 41211 break 41212 } 41213 y := z.Args[0] 41214 if !(z.Uses == 1) { 41215 break 41216 } 41217 b.Kind = BlockARM64LT 41218 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41219 v0.AddArg(x) 41220 v0.AddArg(y) 41221 b.SetControl(v0) 41222 b.Aux = nil 41223 return true 41224 } 41225 // match: (LT (CMPconst [0] z:(MADD a x y)) yes no) 41226 // cond: z.Uses==1 41227 // result: (LT (CMN a (MUL <x.Type> x y)) yes no) 41228 for { 41229 v := b.Control 41230 if v.Op != OpARM64CMPconst { 41231 break 41232 } 41233 if v.AuxInt != 0 { 41234 break 41235 } 41236 z := v.Args[0] 41237 if z.Op != OpARM64MADD { 41238 break 41239 } 41240 _ = z.Args[2] 41241 a := z.Args[0] 41242 x := z.Args[1] 41243 y := z.Args[2] 41244 if !(z.Uses == 1) { 41245 break 41246 } 41247 b.Kind = BlockARM64LT 41248 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41249 v0.AddArg(a) 41250 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41251 v1.AddArg(x) 41252 v1.AddArg(y) 41253 v0.AddArg(v1) 41254 b.SetControl(v0) 41255 b.Aux = nil 41256 return true 41257 } 41258 // match: (LT (CMPconst [0] z:(MSUB a 
x y)) yes no) 41259 // cond: z.Uses==1 41260 // result: (LT (CMP a (MUL <x.Type> x y)) yes no) 41261 for { 41262 v := b.Control 41263 if v.Op != OpARM64CMPconst { 41264 break 41265 } 41266 if v.AuxInt != 0 { 41267 break 41268 } 41269 z := v.Args[0] 41270 if z.Op != OpARM64MSUB { 41271 break 41272 } 41273 _ = z.Args[2] 41274 a := z.Args[0] 41275 x := z.Args[1] 41276 y := z.Args[2] 41277 if !(z.Uses == 1) { 41278 break 41279 } 41280 b.Kind = BlockARM64LT 41281 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 41282 v0.AddArg(a) 41283 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41284 v1.AddArg(x) 41285 v1.AddArg(y) 41286 v0.AddArg(v1) 41287 b.SetControl(v0) 41288 b.Aux = nil 41289 return true 41290 } 41291 // match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no) 41292 // cond: z.Uses==1 41293 // result: (LT (CMNW a (MULW <x.Type> x y)) yes no) 41294 for { 41295 v := b.Control 41296 if v.Op != OpARM64CMPWconst { 41297 break 41298 } 41299 if v.AuxInt != 0 { 41300 break 41301 } 41302 z := v.Args[0] 41303 if z.Op != OpARM64MADDW { 41304 break 41305 } 41306 _ = z.Args[2] 41307 a := z.Args[0] 41308 x := z.Args[1] 41309 y := z.Args[2] 41310 if !(z.Uses == 1) { 41311 break 41312 } 41313 b.Kind = BlockARM64LT 41314 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41315 v0.AddArg(a) 41316 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41317 v1.AddArg(x) 41318 v1.AddArg(y) 41319 v0.AddArg(v1) 41320 b.SetControl(v0) 41321 b.Aux = nil 41322 return true 41323 } 41324 // match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no) 41325 // cond: z.Uses==1 41326 // result: (LT (CMPW a (MULW <x.Type> x y)) yes no) 41327 for { 41328 v := b.Control 41329 if v.Op != OpARM64CMPWconst { 41330 break 41331 } 41332 if v.AuxInt != 0 { 41333 break 41334 } 41335 z := v.Args[0] 41336 if z.Op != OpARM64MSUBW { 41337 break 41338 } 41339 _ = z.Args[2] 41340 a := z.Args[0] 41341 x := z.Args[1] 41342 y := z.Args[2] 41343 if !(z.Uses == 1) { 41344 break 41345 } 41346 b.Kind = BlockARM64LT 41347 v0 := 
b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 41348 v0.AddArg(a) 41349 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41350 v1.AddArg(x) 41351 v1.AddArg(y) 41352 v0.AddArg(v1) 41353 b.SetControl(v0) 41354 b.Aux = nil 41355 return true 41356 } 41357 // match: (LT (CMPWconst [0] x) yes no) 41358 // cond: 41359 // result: (TBNZ {int64(31)} x yes no) 41360 for { 41361 v := b.Control 41362 if v.Op != OpARM64CMPWconst { 41363 break 41364 } 41365 if v.AuxInt != 0 { 41366 break 41367 } 41368 x := v.Args[0] 41369 b.Kind = BlockARM64TBNZ 41370 b.SetControl(x) 41371 b.Aux = int64(31) 41372 return true 41373 } 41374 // match: (LT (CMPconst [0] x) yes no) 41375 // cond: 41376 // result: (TBNZ {int64(63)} x yes no) 41377 for { 41378 v := b.Control 41379 if v.Op != OpARM64CMPconst { 41380 break 41381 } 41382 if v.AuxInt != 0 { 41383 break 41384 } 41385 x := v.Args[0] 41386 b.Kind = BlockARM64TBNZ 41387 b.SetControl(x) 41388 b.Aux = int64(63) 41389 return true 41390 } 41391 // match: (LT (FlagEQ) yes no) 41392 // cond: 41393 // result: (First nil no yes) 41394 for { 41395 v := b.Control 41396 if v.Op != OpARM64FlagEQ { 41397 break 41398 } 41399 b.Kind = BlockFirst 41400 b.SetControl(nil) 41401 b.Aux = nil 41402 b.swapSuccessors() 41403 return true 41404 } 41405 // match: (LT (FlagLT_ULT) yes no) 41406 // cond: 41407 // result: (First nil yes no) 41408 for { 41409 v := b.Control 41410 if v.Op != OpARM64FlagLT_ULT { 41411 break 41412 } 41413 b.Kind = BlockFirst 41414 b.SetControl(nil) 41415 b.Aux = nil 41416 return true 41417 } 41418 // match: (LT (FlagLT_UGT) yes no) 41419 // cond: 41420 // result: (First nil yes no) 41421 for { 41422 v := b.Control 41423 if v.Op != OpARM64FlagLT_UGT { 41424 break 41425 } 41426 b.Kind = BlockFirst 41427 b.SetControl(nil) 41428 b.Aux = nil 41429 return true 41430 } 41431 // match: (LT (FlagGT_ULT) yes no) 41432 // cond: 41433 // result: (First nil no yes) 41434 for { 41435 v := b.Control 41436 if v.Op != OpARM64FlagGT_ULT { 41437 break 41438 } 
41439 b.Kind = BlockFirst 41440 b.SetControl(nil) 41441 b.Aux = nil 41442 b.swapSuccessors() 41443 return true 41444 } 41445 // match: (LT (FlagGT_UGT) yes no) 41446 // cond: 41447 // result: (First nil no yes) 41448 for { 41449 v := b.Control 41450 if v.Op != OpARM64FlagGT_UGT { 41451 break 41452 } 41453 b.Kind = BlockFirst 41454 b.SetControl(nil) 41455 b.Aux = nil 41456 b.swapSuccessors() 41457 return true 41458 } 41459 // match: (LT (InvertFlags cmp) yes no) 41460 // cond: 41461 // result: (GT cmp yes no) 41462 for { 41463 v := b.Control 41464 if v.Op != OpARM64InvertFlags { 41465 break 41466 } 41467 cmp := v.Args[0] 41468 b.Kind = BlockARM64GT 41469 b.SetControl(cmp) 41470 b.Aux = nil 41471 return true 41472 } 41473 case BlockARM64NE: 41474 // match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 41475 // cond: x.Uses == 1 41476 // result: (NE (TSTWconst [c] y) yes no) 41477 for { 41478 v := b.Control 41479 if v.Op != OpARM64CMPWconst { 41480 break 41481 } 41482 if v.AuxInt != 0 { 41483 break 41484 } 41485 x := v.Args[0] 41486 if x.Op != OpARM64ANDconst { 41487 break 41488 } 41489 c := x.AuxInt 41490 y := x.Args[0] 41491 if !(x.Uses == 1) { 41492 break 41493 } 41494 b.Kind = BlockARM64NE 41495 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 41496 v0.AuxInt = c 41497 v0.AddArg(y) 41498 b.SetControl(v0) 41499 b.Aux = nil 41500 return true 41501 } 41502 // match: (NE (CMPconst [0] z:(AND x y)) yes no) 41503 // cond: z.Uses == 1 41504 // result: (NE (TST x y) yes no) 41505 for { 41506 v := b.Control 41507 if v.Op != OpARM64CMPconst { 41508 break 41509 } 41510 if v.AuxInt != 0 { 41511 break 41512 } 41513 z := v.Args[0] 41514 if z.Op != OpARM64AND { 41515 break 41516 } 41517 _ = z.Args[1] 41518 x := z.Args[0] 41519 y := z.Args[1] 41520 if !(z.Uses == 1) { 41521 break 41522 } 41523 b.Kind = BlockARM64NE 41524 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 41525 v0.AddArg(x) 41526 v0.AddArg(y) 41527 b.SetControl(v0) 41528 b.Aux = nil 41529 return 
true 41530 } 41531 // match: (NE (CMPWconst [0] z:(AND x y)) yes no) 41532 // cond: z.Uses == 1 41533 // result: (NE (TSTW x y) yes no) 41534 for { 41535 v := b.Control 41536 if v.Op != OpARM64CMPWconst { 41537 break 41538 } 41539 if v.AuxInt != 0 { 41540 break 41541 } 41542 z := v.Args[0] 41543 if z.Op != OpARM64AND { 41544 break 41545 } 41546 _ = z.Args[1] 41547 x := z.Args[0] 41548 y := z.Args[1] 41549 if !(z.Uses == 1) { 41550 break 41551 } 41552 b.Kind = BlockARM64NE 41553 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 41554 v0.AddArg(x) 41555 v0.AddArg(y) 41556 b.SetControl(v0) 41557 b.Aux = nil 41558 return true 41559 } 41560 // match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no) 41561 // cond: x.Uses == 1 41562 // result: (NE (TSTconst [c] y) yes no) 41563 for { 41564 v := b.Control 41565 if v.Op != OpARM64CMPconst { 41566 break 41567 } 41568 if v.AuxInt != 0 { 41569 break 41570 } 41571 x := v.Args[0] 41572 if x.Op != OpARM64ANDconst { 41573 break 41574 } 41575 c := x.AuxInt 41576 y := x.Args[0] 41577 if !(x.Uses == 1) { 41578 break 41579 } 41580 b.Kind = BlockARM64NE 41581 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 41582 v0.AuxInt = c 41583 v0.AddArg(y) 41584 b.SetControl(v0) 41585 b.Aux = nil 41586 return true 41587 } 41588 // match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no) 41589 // cond: x.Uses == 1 41590 // result: (NE (CMNconst [c] y) yes no) 41591 for { 41592 v := b.Control 41593 if v.Op != OpARM64CMPconst { 41594 break 41595 } 41596 if v.AuxInt != 0 { 41597 break 41598 } 41599 x := v.Args[0] 41600 if x.Op != OpARM64ADDconst { 41601 break 41602 } 41603 c := x.AuxInt 41604 y := x.Args[0] 41605 if !(x.Uses == 1) { 41606 break 41607 } 41608 b.Kind = BlockARM64NE 41609 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 41610 v0.AuxInt = c 41611 v0.AddArg(y) 41612 b.SetControl(v0) 41613 b.Aux = nil 41614 return true 41615 } 41616 // match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 41617 // cond: x.Uses == 1 41618 
// result: (NE (CMNWconst [c] y) yes no) 41619 for { 41620 v := b.Control 41621 if v.Op != OpARM64CMPWconst { 41622 break 41623 } 41624 if v.AuxInt != 0 { 41625 break 41626 } 41627 x := v.Args[0] 41628 if x.Op != OpARM64ADDconst { 41629 break 41630 } 41631 c := x.AuxInt 41632 y := x.Args[0] 41633 if !(x.Uses == 1) { 41634 break 41635 } 41636 b.Kind = BlockARM64NE 41637 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 41638 v0.AuxInt = c 41639 v0.AddArg(y) 41640 b.SetControl(v0) 41641 b.Aux = nil 41642 return true 41643 } 41644 // match: (NE (CMPconst [0] z:(ADD x y)) yes no) 41645 // cond: z.Uses == 1 41646 // result: (NE (CMN x y) yes no) 41647 for { 41648 v := b.Control 41649 if v.Op != OpARM64CMPconst { 41650 break 41651 } 41652 if v.AuxInt != 0 { 41653 break 41654 } 41655 z := v.Args[0] 41656 if z.Op != OpARM64ADD { 41657 break 41658 } 41659 _ = z.Args[1] 41660 x := z.Args[0] 41661 y := z.Args[1] 41662 if !(z.Uses == 1) { 41663 break 41664 } 41665 b.Kind = BlockARM64NE 41666 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41667 v0.AddArg(x) 41668 v0.AddArg(y) 41669 b.SetControl(v0) 41670 b.Aux = nil 41671 return true 41672 } 41673 // match: (NE (CMPWconst [0] z:(ADD x y)) yes no) 41674 // cond: z.Uses == 1 41675 // result: (NE (CMNW x y) yes no) 41676 for { 41677 v := b.Control 41678 if v.Op != OpARM64CMPWconst { 41679 break 41680 } 41681 if v.AuxInt != 0 { 41682 break 41683 } 41684 z := v.Args[0] 41685 if z.Op != OpARM64ADD { 41686 break 41687 } 41688 _ = z.Args[1] 41689 x := z.Args[0] 41690 y := z.Args[1] 41691 if !(z.Uses == 1) { 41692 break 41693 } 41694 b.Kind = BlockARM64NE 41695 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41696 v0.AddArg(x) 41697 v0.AddArg(y) 41698 b.SetControl(v0) 41699 b.Aux = nil 41700 return true 41701 } 41702 // match: (NE (CMP x z:(NEG y)) yes no) 41703 // cond: z.Uses == 1 41704 // result: (NE (CMN x y) yes no) 41705 for { 41706 v := b.Control 41707 if v.Op != OpARM64CMP { 41708 break 41709 } 41710 _ = 
v.Args[1] 41711 x := v.Args[0] 41712 z := v.Args[1] 41713 if z.Op != OpARM64NEG { 41714 break 41715 } 41716 y := z.Args[0] 41717 if !(z.Uses == 1) { 41718 break 41719 } 41720 b.Kind = BlockARM64NE 41721 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41722 v0.AddArg(x) 41723 v0.AddArg(y) 41724 b.SetControl(v0) 41725 b.Aux = nil 41726 return true 41727 } 41728 // match: (NE (CMPW x z:(NEG y)) yes no) 41729 // cond: z.Uses == 1 41730 // result: (NE (CMNW x y) yes no) 41731 for { 41732 v := b.Control 41733 if v.Op != OpARM64CMPW { 41734 break 41735 } 41736 _ = v.Args[1] 41737 x := v.Args[0] 41738 z := v.Args[1] 41739 if z.Op != OpARM64NEG { 41740 break 41741 } 41742 y := z.Args[0] 41743 if !(z.Uses == 1) { 41744 break 41745 } 41746 b.Kind = BlockARM64NE 41747 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41748 v0.AddArg(x) 41749 v0.AddArg(y) 41750 b.SetControl(v0) 41751 b.Aux = nil 41752 return true 41753 } 41754 // match: (NE (CMPconst [0] x) yes no) 41755 // cond: 41756 // result: (NZ x yes no) 41757 for { 41758 v := b.Control 41759 if v.Op != OpARM64CMPconst { 41760 break 41761 } 41762 if v.AuxInt != 0 { 41763 break 41764 } 41765 x := v.Args[0] 41766 b.Kind = BlockARM64NZ 41767 b.SetControl(x) 41768 b.Aux = nil 41769 return true 41770 } 41771 // match: (NE (CMPWconst [0] x) yes no) 41772 // cond: 41773 // result: (NZW x yes no) 41774 for { 41775 v := b.Control 41776 if v.Op != OpARM64CMPWconst { 41777 break 41778 } 41779 if v.AuxInt != 0 { 41780 break 41781 } 41782 x := v.Args[0] 41783 b.Kind = BlockARM64NZW 41784 b.SetControl(x) 41785 b.Aux = nil 41786 return true 41787 } 41788 // match: (NE (CMPconst [0] z:(MADD a x y)) yes no) 41789 // cond: z.Uses==1 41790 // result: (NE (CMN a (MUL <x.Type> x y)) yes no) 41791 for { 41792 v := b.Control 41793 if v.Op != OpARM64CMPconst { 41794 break 41795 } 41796 if v.AuxInt != 0 { 41797 break 41798 } 41799 z := v.Args[0] 41800 if z.Op != OpARM64MADD { 41801 break 41802 } 41803 _ = z.Args[2] 41804 a := z.Args[0] 
41805 x := z.Args[1] 41806 y := z.Args[2] 41807 if !(z.Uses == 1) { 41808 break 41809 } 41810 b.Kind = BlockARM64NE 41811 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41812 v0.AddArg(a) 41813 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41814 v1.AddArg(x) 41815 v1.AddArg(y) 41816 v0.AddArg(v1) 41817 b.SetControl(v0) 41818 b.Aux = nil 41819 return true 41820 } 41821 // match: (NE (CMPconst [0] z:(MSUB a x y)) yes no) 41822 // cond: z.Uses==1 41823 // result: (NE (CMP a (MUL <x.Type> x y)) yes no) 41824 for { 41825 v := b.Control 41826 if v.Op != OpARM64CMPconst { 41827 break 41828 } 41829 if v.AuxInt != 0 { 41830 break 41831 } 41832 z := v.Args[0] 41833 if z.Op != OpARM64MSUB { 41834 break 41835 } 41836 _ = z.Args[2] 41837 a := z.Args[0] 41838 x := z.Args[1] 41839 y := z.Args[2] 41840 if !(z.Uses == 1) { 41841 break 41842 } 41843 b.Kind = BlockARM64NE 41844 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 41845 v0.AddArg(a) 41846 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41847 v1.AddArg(x) 41848 v1.AddArg(y) 41849 v0.AddArg(v1) 41850 b.SetControl(v0) 41851 b.Aux = nil 41852 return true 41853 } 41854 // match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no) 41855 // cond: z.Uses==1 41856 // result: (NE (CMNW a (MULW <x.Type> x y)) yes no) 41857 for { 41858 v := b.Control 41859 if v.Op != OpARM64CMPWconst { 41860 break 41861 } 41862 if v.AuxInt != 0 { 41863 break 41864 } 41865 z := v.Args[0] 41866 if z.Op != OpARM64MADDW { 41867 break 41868 } 41869 _ = z.Args[2] 41870 a := z.Args[0] 41871 x := z.Args[1] 41872 y := z.Args[2] 41873 if !(z.Uses == 1) { 41874 break 41875 } 41876 b.Kind = BlockARM64NE 41877 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41878 v0.AddArg(a) 41879 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41880 v1.AddArg(x) 41881 v1.AddArg(y) 41882 v0.AddArg(v1) 41883 b.SetControl(v0) 41884 b.Aux = nil 41885 return true 41886 } 41887 // match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no) 41888 // cond: z.Uses==1 41889 // result: (NE 
(CMPW a (MULW <x.Type> x y)) yes no) 41890 for { 41891 v := b.Control 41892 if v.Op != OpARM64CMPWconst { 41893 break 41894 } 41895 if v.AuxInt != 0 { 41896 break 41897 } 41898 z := v.Args[0] 41899 if z.Op != OpARM64MSUBW { 41900 break 41901 } 41902 _ = z.Args[2] 41903 a := z.Args[0] 41904 x := z.Args[1] 41905 y := z.Args[2] 41906 if !(z.Uses == 1) { 41907 break 41908 } 41909 b.Kind = BlockARM64NE 41910 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 41911 v0.AddArg(a) 41912 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41913 v1.AddArg(x) 41914 v1.AddArg(y) 41915 v0.AddArg(v1) 41916 b.SetControl(v0) 41917 b.Aux = nil 41918 return true 41919 } 41920 // match: (NE (TSTconst [c] x) yes no) 41921 // cond: oneBit(c) 41922 // result: (TBNZ {ntz(c)} x yes no) 41923 for { 41924 v := b.Control 41925 if v.Op != OpARM64TSTconst { 41926 break 41927 } 41928 c := v.AuxInt 41929 x := v.Args[0] 41930 if !(oneBit(c)) { 41931 break 41932 } 41933 b.Kind = BlockARM64TBNZ 41934 b.SetControl(x) 41935 b.Aux = ntz(c) 41936 return true 41937 } 41938 // match: (NE (TSTWconst [c] x) yes no) 41939 // cond: oneBit(int64(uint32(c))) 41940 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 41941 for { 41942 v := b.Control 41943 if v.Op != OpARM64TSTWconst { 41944 break 41945 } 41946 c := v.AuxInt 41947 x := v.Args[0] 41948 if !(oneBit(int64(uint32(c)))) { 41949 break 41950 } 41951 b.Kind = BlockARM64TBNZ 41952 b.SetControl(x) 41953 b.Aux = ntz(int64(uint32(c))) 41954 return true 41955 } 41956 // match: (NE (FlagEQ) yes no) 41957 // cond: 41958 // result: (First nil no yes) 41959 for { 41960 v := b.Control 41961 if v.Op != OpARM64FlagEQ { 41962 break 41963 } 41964 b.Kind = BlockFirst 41965 b.SetControl(nil) 41966 b.Aux = nil 41967 b.swapSuccessors() 41968 return true 41969 } 41970 // match: (NE (FlagLT_ULT) yes no) 41971 // cond: 41972 // result: (First nil yes no) 41973 for { 41974 v := b.Control 41975 if v.Op != OpARM64FlagLT_ULT { 41976 break 41977 } 41978 b.Kind = BlockFirst 41979 
b.SetControl(nil) 41980 b.Aux = nil 41981 return true 41982 } 41983 // match: (NE (FlagLT_UGT) yes no) 41984 // cond: 41985 // result: (First nil yes no) 41986 for { 41987 v := b.Control 41988 if v.Op != OpARM64FlagLT_UGT { 41989 break 41990 } 41991 b.Kind = BlockFirst 41992 b.SetControl(nil) 41993 b.Aux = nil 41994 return true 41995 } 41996 // match: (NE (FlagGT_ULT) yes no) 41997 // cond: 41998 // result: (First nil yes no) 41999 for { 42000 v := b.Control 42001 if v.Op != OpARM64FlagGT_ULT { 42002 break 42003 } 42004 b.Kind = BlockFirst 42005 b.SetControl(nil) 42006 b.Aux = nil 42007 return true 42008 } 42009 // match: (NE (FlagGT_UGT) yes no) 42010 // cond: 42011 // result: (First nil yes no) 42012 for { 42013 v := b.Control 42014 if v.Op != OpARM64FlagGT_UGT { 42015 break 42016 } 42017 b.Kind = BlockFirst 42018 b.SetControl(nil) 42019 b.Aux = nil 42020 return true 42021 } 42022 // match: (NE (InvertFlags cmp) yes no) 42023 // cond: 42024 // result: (NE cmp yes no) 42025 for { 42026 v := b.Control 42027 if v.Op != OpARM64InvertFlags { 42028 break 42029 } 42030 cmp := v.Args[0] 42031 b.Kind = BlockARM64NE 42032 b.SetControl(cmp) 42033 b.Aux = nil 42034 return true 42035 } 42036 case BlockARM64NZ: 42037 // match: (NZ (Equal cc) yes no) 42038 // cond: 42039 // result: (EQ cc yes no) 42040 for { 42041 v := b.Control 42042 if v.Op != OpARM64Equal { 42043 break 42044 } 42045 cc := v.Args[0] 42046 b.Kind = BlockARM64EQ 42047 b.SetControl(cc) 42048 b.Aux = nil 42049 return true 42050 } 42051 // match: (NZ (NotEqual cc) yes no) 42052 // cond: 42053 // result: (NE cc yes no) 42054 for { 42055 v := b.Control 42056 if v.Op != OpARM64NotEqual { 42057 break 42058 } 42059 cc := v.Args[0] 42060 b.Kind = BlockARM64NE 42061 b.SetControl(cc) 42062 b.Aux = nil 42063 return true 42064 } 42065 // match: (NZ (LessThan cc) yes no) 42066 // cond: 42067 // result: (LT cc yes no) 42068 for { 42069 v := b.Control 42070 if v.Op != OpARM64LessThan { 42071 break 42072 } 42073 cc := v.Args[0] 
42074 b.Kind = BlockARM64LT 42075 b.SetControl(cc) 42076 b.Aux = nil 42077 return true 42078 } 42079 // match: (NZ (LessThanU cc) yes no) 42080 // cond: 42081 // result: (ULT cc yes no) 42082 for { 42083 v := b.Control 42084 if v.Op != OpARM64LessThanU { 42085 break 42086 } 42087 cc := v.Args[0] 42088 b.Kind = BlockARM64ULT 42089 b.SetControl(cc) 42090 b.Aux = nil 42091 return true 42092 } 42093 // match: (NZ (LessEqual cc) yes no) 42094 // cond: 42095 // result: (LE cc yes no) 42096 for { 42097 v := b.Control 42098 if v.Op != OpARM64LessEqual { 42099 break 42100 } 42101 cc := v.Args[0] 42102 b.Kind = BlockARM64LE 42103 b.SetControl(cc) 42104 b.Aux = nil 42105 return true 42106 } 42107 // match: (NZ (LessEqualU cc) yes no) 42108 // cond: 42109 // result: (ULE cc yes no) 42110 for { 42111 v := b.Control 42112 if v.Op != OpARM64LessEqualU { 42113 break 42114 } 42115 cc := v.Args[0] 42116 b.Kind = BlockARM64ULE 42117 b.SetControl(cc) 42118 b.Aux = nil 42119 return true 42120 } 42121 // match: (NZ (GreaterThan cc) yes no) 42122 // cond: 42123 // result: (GT cc yes no) 42124 for { 42125 v := b.Control 42126 if v.Op != OpARM64GreaterThan { 42127 break 42128 } 42129 cc := v.Args[0] 42130 b.Kind = BlockARM64GT 42131 b.SetControl(cc) 42132 b.Aux = nil 42133 return true 42134 } 42135 // match: (NZ (GreaterThanU cc) yes no) 42136 // cond: 42137 // result: (UGT cc yes no) 42138 for { 42139 v := b.Control 42140 if v.Op != OpARM64GreaterThanU { 42141 break 42142 } 42143 cc := v.Args[0] 42144 b.Kind = BlockARM64UGT 42145 b.SetControl(cc) 42146 b.Aux = nil 42147 return true 42148 } 42149 // match: (NZ (GreaterEqual cc) yes no) 42150 // cond: 42151 // result: (GE cc yes no) 42152 for { 42153 v := b.Control 42154 if v.Op != OpARM64GreaterEqual { 42155 break 42156 } 42157 cc := v.Args[0] 42158 b.Kind = BlockARM64GE 42159 b.SetControl(cc) 42160 b.Aux = nil 42161 return true 42162 } 42163 // match: (NZ (GreaterEqualU cc) yes no) 42164 // cond: 42165 // result: (UGE cc yes no) 42166 for 
{ 42167 v := b.Control 42168 if v.Op != OpARM64GreaterEqualU { 42169 break 42170 } 42171 cc := v.Args[0] 42172 b.Kind = BlockARM64UGE 42173 b.SetControl(cc) 42174 b.Aux = nil 42175 return true 42176 } 42177 // match: (NZ (ANDconst [c] x) yes no) 42178 // cond: oneBit(c) 42179 // result: (TBNZ {ntz(c)} x yes no) 42180 for { 42181 v := b.Control 42182 if v.Op != OpARM64ANDconst { 42183 break 42184 } 42185 c := v.AuxInt 42186 x := v.Args[0] 42187 if !(oneBit(c)) { 42188 break 42189 } 42190 b.Kind = BlockARM64TBNZ 42191 b.SetControl(x) 42192 b.Aux = ntz(c) 42193 return true 42194 } 42195 // match: (NZ (MOVDconst [0]) yes no) 42196 // cond: 42197 // result: (First nil no yes) 42198 for { 42199 v := b.Control 42200 if v.Op != OpARM64MOVDconst { 42201 break 42202 } 42203 if v.AuxInt != 0 { 42204 break 42205 } 42206 b.Kind = BlockFirst 42207 b.SetControl(nil) 42208 b.Aux = nil 42209 b.swapSuccessors() 42210 return true 42211 } 42212 // match: (NZ (MOVDconst [c]) yes no) 42213 // cond: c != 0 42214 // result: (First nil yes no) 42215 for { 42216 v := b.Control 42217 if v.Op != OpARM64MOVDconst { 42218 break 42219 } 42220 c := v.AuxInt 42221 if !(c != 0) { 42222 break 42223 } 42224 b.Kind = BlockFirst 42225 b.SetControl(nil) 42226 b.Aux = nil 42227 return true 42228 } 42229 case BlockARM64NZW: 42230 // match: (NZW (ANDconst [c] x) yes no) 42231 // cond: oneBit(int64(uint32(c))) 42232 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 42233 for { 42234 v := b.Control 42235 if v.Op != OpARM64ANDconst { 42236 break 42237 } 42238 c := v.AuxInt 42239 x := v.Args[0] 42240 if !(oneBit(int64(uint32(c)))) { 42241 break 42242 } 42243 b.Kind = BlockARM64TBNZ 42244 b.SetControl(x) 42245 b.Aux = ntz(int64(uint32(c))) 42246 return true 42247 } 42248 // match: (NZW (MOVDconst [c]) yes no) 42249 // cond: int32(c) == 0 42250 // result: (First nil no yes) 42251 for { 42252 v := b.Control 42253 if v.Op != OpARM64MOVDconst { 42254 break 42255 } 42256 c := v.AuxInt 42257 if !(int32(c) == 0) { 
42258 break 42259 } 42260 b.Kind = BlockFirst 42261 b.SetControl(nil) 42262 b.Aux = nil 42263 b.swapSuccessors() 42264 return true 42265 } 42266 // match: (NZW (MOVDconst [c]) yes no) 42267 // cond: int32(c) != 0 42268 // result: (First nil yes no) 42269 for { 42270 v := b.Control 42271 if v.Op != OpARM64MOVDconst { 42272 break 42273 } 42274 c := v.AuxInt 42275 if !(int32(c) != 0) { 42276 break 42277 } 42278 b.Kind = BlockFirst 42279 b.SetControl(nil) 42280 b.Aux = nil 42281 return true 42282 } 42283 case BlockARM64UGE: 42284 // match: (UGE (FlagEQ) yes no) 42285 // cond: 42286 // result: (First nil yes no) 42287 for { 42288 v := b.Control 42289 if v.Op != OpARM64FlagEQ { 42290 break 42291 } 42292 b.Kind = BlockFirst 42293 b.SetControl(nil) 42294 b.Aux = nil 42295 return true 42296 } 42297 // match: (UGE (FlagLT_ULT) yes no) 42298 // cond: 42299 // result: (First nil no yes) 42300 for { 42301 v := b.Control 42302 if v.Op != OpARM64FlagLT_ULT { 42303 break 42304 } 42305 b.Kind = BlockFirst 42306 b.SetControl(nil) 42307 b.Aux = nil 42308 b.swapSuccessors() 42309 return true 42310 } 42311 // match: (UGE (FlagLT_UGT) yes no) 42312 // cond: 42313 // result: (First nil yes no) 42314 for { 42315 v := b.Control 42316 if v.Op != OpARM64FlagLT_UGT { 42317 break 42318 } 42319 b.Kind = BlockFirst 42320 b.SetControl(nil) 42321 b.Aux = nil 42322 return true 42323 } 42324 // match: (UGE (FlagGT_ULT) yes no) 42325 // cond: 42326 // result: (First nil no yes) 42327 for { 42328 v := b.Control 42329 if v.Op != OpARM64FlagGT_ULT { 42330 break 42331 } 42332 b.Kind = BlockFirst 42333 b.SetControl(nil) 42334 b.Aux = nil 42335 b.swapSuccessors() 42336 return true 42337 } 42338 // match: (UGE (FlagGT_UGT) yes no) 42339 // cond: 42340 // result: (First nil yes no) 42341 for { 42342 v := b.Control 42343 if v.Op != OpARM64FlagGT_UGT { 42344 break 42345 } 42346 b.Kind = BlockFirst 42347 b.SetControl(nil) 42348 b.Aux = nil 42349 return true 42350 } 42351 // match: (UGE (InvertFlags cmp) yes no) 
42352 // cond: 42353 // result: (ULE cmp yes no) 42354 for { 42355 v := b.Control 42356 if v.Op != OpARM64InvertFlags { 42357 break 42358 } 42359 cmp := v.Args[0] 42360 b.Kind = BlockARM64ULE 42361 b.SetControl(cmp) 42362 b.Aux = nil 42363 return true 42364 } 42365 case BlockARM64UGT: 42366 // match: (UGT (FlagEQ) yes no) 42367 // cond: 42368 // result: (First nil no yes) 42369 for { 42370 v := b.Control 42371 if v.Op != OpARM64FlagEQ { 42372 break 42373 } 42374 b.Kind = BlockFirst 42375 b.SetControl(nil) 42376 b.Aux = nil 42377 b.swapSuccessors() 42378 return true 42379 } 42380 // match: (UGT (FlagLT_ULT) yes no) 42381 // cond: 42382 // result: (First nil no yes) 42383 for { 42384 v := b.Control 42385 if v.Op != OpARM64FlagLT_ULT { 42386 break 42387 } 42388 b.Kind = BlockFirst 42389 b.SetControl(nil) 42390 b.Aux = nil 42391 b.swapSuccessors() 42392 return true 42393 } 42394 // match: (UGT (FlagLT_UGT) yes no) 42395 // cond: 42396 // result: (First nil yes no) 42397 for { 42398 v := b.Control 42399 if v.Op != OpARM64FlagLT_UGT { 42400 break 42401 } 42402 b.Kind = BlockFirst 42403 b.SetControl(nil) 42404 b.Aux = nil 42405 return true 42406 } 42407 // match: (UGT (FlagGT_ULT) yes no) 42408 // cond: 42409 // result: (First nil no yes) 42410 for { 42411 v := b.Control 42412 if v.Op != OpARM64FlagGT_ULT { 42413 break 42414 } 42415 b.Kind = BlockFirst 42416 b.SetControl(nil) 42417 b.Aux = nil 42418 b.swapSuccessors() 42419 return true 42420 } 42421 // match: (UGT (FlagGT_UGT) yes no) 42422 // cond: 42423 // result: (First nil yes no) 42424 for { 42425 v := b.Control 42426 if v.Op != OpARM64FlagGT_UGT { 42427 break 42428 } 42429 b.Kind = BlockFirst 42430 b.SetControl(nil) 42431 b.Aux = nil 42432 return true 42433 } 42434 // match: (UGT (InvertFlags cmp) yes no) 42435 // cond: 42436 // result: (ULT cmp yes no) 42437 for { 42438 v := b.Control 42439 if v.Op != OpARM64InvertFlags { 42440 break 42441 } 42442 cmp := v.Args[0] 42443 b.Kind = BlockARM64ULT 42444 
b.SetControl(cmp) 42445 b.Aux = nil 42446 return true 42447 } 42448 case BlockARM64ULE: 42449 // match: (ULE (FlagEQ) yes no) 42450 // cond: 42451 // result: (First nil yes no) 42452 for { 42453 v := b.Control 42454 if v.Op != OpARM64FlagEQ { 42455 break 42456 } 42457 b.Kind = BlockFirst 42458 b.SetControl(nil) 42459 b.Aux = nil 42460 return true 42461 } 42462 // match: (ULE (FlagLT_ULT) yes no) 42463 // cond: 42464 // result: (First nil yes no) 42465 for { 42466 v := b.Control 42467 if v.Op != OpARM64FlagLT_ULT { 42468 break 42469 } 42470 b.Kind = BlockFirst 42471 b.SetControl(nil) 42472 b.Aux = nil 42473 return true 42474 } 42475 // match: (ULE (FlagLT_UGT) yes no) 42476 // cond: 42477 // result: (First nil no yes) 42478 for { 42479 v := b.Control 42480 if v.Op != OpARM64FlagLT_UGT { 42481 break 42482 } 42483 b.Kind = BlockFirst 42484 b.SetControl(nil) 42485 b.Aux = nil 42486 b.swapSuccessors() 42487 return true 42488 } 42489 // match: (ULE (FlagGT_ULT) yes no) 42490 // cond: 42491 // result: (First nil yes no) 42492 for { 42493 v := b.Control 42494 if v.Op != OpARM64FlagGT_ULT { 42495 break 42496 } 42497 b.Kind = BlockFirst 42498 b.SetControl(nil) 42499 b.Aux = nil 42500 return true 42501 } 42502 // match: (ULE (FlagGT_UGT) yes no) 42503 // cond: 42504 // result: (First nil no yes) 42505 for { 42506 v := b.Control 42507 if v.Op != OpARM64FlagGT_UGT { 42508 break 42509 } 42510 b.Kind = BlockFirst 42511 b.SetControl(nil) 42512 b.Aux = nil 42513 b.swapSuccessors() 42514 return true 42515 } 42516 // match: (ULE (InvertFlags cmp) yes no) 42517 // cond: 42518 // result: (UGE cmp yes no) 42519 for { 42520 v := b.Control 42521 if v.Op != OpARM64InvertFlags { 42522 break 42523 } 42524 cmp := v.Args[0] 42525 b.Kind = BlockARM64UGE 42526 b.SetControl(cmp) 42527 b.Aux = nil 42528 return true 42529 } 42530 case BlockARM64ULT: 42531 // match: (ULT (FlagEQ) yes no) 42532 // cond: 42533 // result: (First nil no yes) 42534 for { 42535 v := b.Control 42536 if v.Op != 
OpARM64FlagEQ { 42537 break 42538 } 42539 b.Kind = BlockFirst 42540 b.SetControl(nil) 42541 b.Aux = nil 42542 b.swapSuccessors() 42543 return true 42544 } 42545 // match: (ULT (FlagLT_ULT) yes no) 42546 // cond: 42547 // result: (First nil yes no) 42548 for { 42549 v := b.Control 42550 if v.Op != OpARM64FlagLT_ULT { 42551 break 42552 } 42553 b.Kind = BlockFirst 42554 b.SetControl(nil) 42555 b.Aux = nil 42556 return true 42557 } 42558 // match: (ULT (FlagLT_UGT) yes no) 42559 // cond: 42560 // result: (First nil no yes) 42561 for { 42562 v := b.Control 42563 if v.Op != OpARM64FlagLT_UGT { 42564 break 42565 } 42566 b.Kind = BlockFirst 42567 b.SetControl(nil) 42568 b.Aux = nil 42569 b.swapSuccessors() 42570 return true 42571 } 42572 // match: (ULT (FlagGT_ULT) yes no) 42573 // cond: 42574 // result: (First nil yes no) 42575 for { 42576 v := b.Control 42577 if v.Op != OpARM64FlagGT_ULT { 42578 break 42579 } 42580 b.Kind = BlockFirst 42581 b.SetControl(nil) 42582 b.Aux = nil 42583 return true 42584 } 42585 // match: (ULT (FlagGT_UGT) yes no) 42586 // cond: 42587 // result: (First nil no yes) 42588 for { 42589 v := b.Control 42590 if v.Op != OpARM64FlagGT_UGT { 42591 break 42592 } 42593 b.Kind = BlockFirst 42594 b.SetControl(nil) 42595 b.Aux = nil 42596 b.swapSuccessors() 42597 return true 42598 } 42599 // match: (ULT (InvertFlags cmp) yes no) 42600 // cond: 42601 // result: (UGT cmp yes no) 42602 for { 42603 v := b.Control 42604 if v.Op != OpARM64InvertFlags { 42605 break 42606 } 42607 cmp := v.Args[0] 42608 b.Kind = BlockARM64UGT 42609 b.SetControl(cmp) 42610 b.Aux = nil 42611 return true 42612 } 42613 case BlockARM64Z: 42614 // match: (Z (ANDconst [c] x) yes no) 42615 // cond: oneBit(c) 42616 // result: (TBZ {ntz(c)} x yes no) 42617 for { 42618 v := b.Control 42619 if v.Op != OpARM64ANDconst { 42620 break 42621 } 42622 c := v.AuxInt 42623 x := v.Args[0] 42624 if !(oneBit(c)) { 42625 break 42626 } 42627 b.Kind = BlockARM64TBZ 42628 b.SetControl(x) 42629 b.Aux = ntz(c) 
42630 return true 42631 } 42632 // match: (Z (MOVDconst [0]) yes no) 42633 // cond: 42634 // result: (First nil yes no) 42635 for { 42636 v := b.Control 42637 if v.Op != OpARM64MOVDconst { 42638 break 42639 } 42640 if v.AuxInt != 0 { 42641 break 42642 } 42643 b.Kind = BlockFirst 42644 b.SetControl(nil) 42645 b.Aux = nil 42646 return true 42647 } 42648 // match: (Z (MOVDconst [c]) yes no) 42649 // cond: c != 0 42650 // result: (First nil no yes) 42651 for { 42652 v := b.Control 42653 if v.Op != OpARM64MOVDconst { 42654 break 42655 } 42656 c := v.AuxInt 42657 if !(c != 0) { 42658 break 42659 } 42660 b.Kind = BlockFirst 42661 b.SetControl(nil) 42662 b.Aux = nil 42663 b.swapSuccessors() 42664 return true 42665 } 42666 case BlockARM64ZW: 42667 // match: (ZW (ANDconst [c] x) yes no) 42668 // cond: oneBit(int64(uint32(c))) 42669 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 42670 for { 42671 v := b.Control 42672 if v.Op != OpARM64ANDconst { 42673 break 42674 } 42675 c := v.AuxInt 42676 x := v.Args[0] 42677 if !(oneBit(int64(uint32(c)))) { 42678 break 42679 } 42680 b.Kind = BlockARM64TBZ 42681 b.SetControl(x) 42682 b.Aux = ntz(int64(uint32(c))) 42683 return true 42684 } 42685 // match: (ZW (MOVDconst [c]) yes no) 42686 // cond: int32(c) == 0 42687 // result: (First nil yes no) 42688 for { 42689 v := b.Control 42690 if v.Op != OpARM64MOVDconst { 42691 break 42692 } 42693 c := v.AuxInt 42694 if !(int32(c) == 0) { 42695 break 42696 } 42697 b.Kind = BlockFirst 42698 b.SetControl(nil) 42699 b.Aux = nil 42700 return true 42701 } 42702 // match: (ZW (MOVDconst [c]) yes no) 42703 // cond: int32(c) != 0 42704 // result: (First nil no yes) 42705 for { 42706 v := b.Control 42707 if v.Op != OpARM64MOVDconst { 42708 break 42709 } 42710 c := v.AuxInt 42711 if !(int32(c) != 0) { 42712 break 42713 } 42714 b.Kind = BlockFirst 42715 b.SetControl(nil) 42716 b.Aux = nil 42717 b.swapSuccessors() 42718 return true 42719 } 42720 } 42721 return false 42722 }