github.com/gagliardetto/golang-go@v0.0.0-20201020153340-53909ea70814/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "github.com/gagliardetto/golang-go/cmd/compile/internal/types" 7 8 func rewriteValueARM64(v *Value) bool { 9 switch v.Op { 10 case OpARM64ADCSflags: 11 return rewriteValueARM64_OpARM64ADCSflags_0(v) 12 case OpARM64ADD: 13 return rewriteValueARM64_OpARM64ADD_0(v) || rewriteValueARM64_OpARM64ADD_10(v) || rewriteValueARM64_OpARM64ADD_20(v) 14 case OpARM64ADDconst: 15 return rewriteValueARM64_OpARM64ADDconst_0(v) 16 case OpARM64ADDshiftLL: 17 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 18 case OpARM64ADDshiftRA: 19 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 20 case OpARM64ADDshiftRL: 21 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 22 case OpARM64AND: 23 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 24 case OpARM64ANDconst: 25 return rewriteValueARM64_OpARM64ANDconst_0(v) 26 case OpARM64ANDshiftLL: 27 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 28 case OpARM64ANDshiftRA: 29 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 30 case OpARM64ANDshiftRL: 31 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 32 case OpARM64BIC: 33 return rewriteValueARM64_OpARM64BIC_0(v) 34 case OpARM64BICshiftLL: 35 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 36 case OpARM64BICshiftRA: 37 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 38 case OpARM64BICshiftRL: 39 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 40 case OpARM64CMN: 41 return rewriteValueARM64_OpARM64CMN_0(v) 42 case OpARM64CMNW: 43 return rewriteValueARM64_OpARM64CMNW_0(v) 44 case OpARM64CMNWconst: 45 return rewriteValueARM64_OpARM64CMNWconst_0(v) 46 case OpARM64CMNconst: 47 return rewriteValueARM64_OpARM64CMNconst_0(v) 48 case OpARM64CMNshiftLL: 49 return rewriteValueARM64_OpARM64CMNshiftLL_0(v) 50 case OpARM64CMNshiftRA: 51 return 
rewriteValueARM64_OpARM64CMNshiftRA_0(v) 52 case OpARM64CMNshiftRL: 53 return rewriteValueARM64_OpARM64CMNshiftRL_0(v) 54 case OpARM64CMP: 55 return rewriteValueARM64_OpARM64CMP_0(v) 56 case OpARM64CMPW: 57 return rewriteValueARM64_OpARM64CMPW_0(v) 58 case OpARM64CMPWconst: 59 return rewriteValueARM64_OpARM64CMPWconst_0(v) 60 case OpARM64CMPconst: 61 return rewriteValueARM64_OpARM64CMPconst_0(v) 62 case OpARM64CMPshiftLL: 63 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 64 case OpARM64CMPshiftRA: 65 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 66 case OpARM64CMPshiftRL: 67 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 68 case OpARM64CSEL: 69 return rewriteValueARM64_OpARM64CSEL_0(v) 70 case OpARM64CSEL0: 71 return rewriteValueARM64_OpARM64CSEL0_0(v) 72 case OpARM64DIV: 73 return rewriteValueARM64_OpARM64DIV_0(v) 74 case OpARM64DIVW: 75 return rewriteValueARM64_OpARM64DIVW_0(v) 76 case OpARM64EON: 77 return rewriteValueARM64_OpARM64EON_0(v) 78 case OpARM64EONshiftLL: 79 return rewriteValueARM64_OpARM64EONshiftLL_0(v) 80 case OpARM64EONshiftRA: 81 return rewriteValueARM64_OpARM64EONshiftRA_0(v) 82 case OpARM64EONshiftRL: 83 return rewriteValueARM64_OpARM64EONshiftRL_0(v) 84 case OpARM64Equal: 85 return rewriteValueARM64_OpARM64Equal_0(v) 86 case OpARM64FADDD: 87 return rewriteValueARM64_OpARM64FADDD_0(v) 88 case OpARM64FADDS: 89 return rewriteValueARM64_OpARM64FADDS_0(v) 90 case OpARM64FCMPD: 91 return rewriteValueARM64_OpARM64FCMPD_0(v) 92 case OpARM64FCMPS: 93 return rewriteValueARM64_OpARM64FCMPS_0(v) 94 case OpARM64FMOVDfpgp: 95 return rewriteValueARM64_OpARM64FMOVDfpgp_0(v) 96 case OpARM64FMOVDgpfp: 97 return rewriteValueARM64_OpARM64FMOVDgpfp_0(v) 98 case OpARM64FMOVDload: 99 return rewriteValueARM64_OpARM64FMOVDload_0(v) 100 case OpARM64FMOVDloadidx: 101 return rewriteValueARM64_OpARM64FMOVDloadidx_0(v) 102 case OpARM64FMOVDstore: 103 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 104 case OpARM64FMOVDstoreidx: 105 return 
rewriteValueARM64_OpARM64FMOVDstoreidx_0(v) 106 case OpARM64FMOVSload: 107 return rewriteValueARM64_OpARM64FMOVSload_0(v) 108 case OpARM64FMOVSloadidx: 109 return rewriteValueARM64_OpARM64FMOVSloadidx_0(v) 110 case OpARM64FMOVSstore: 111 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 112 case OpARM64FMOVSstoreidx: 113 return rewriteValueARM64_OpARM64FMOVSstoreidx_0(v) 114 case OpARM64FMULD: 115 return rewriteValueARM64_OpARM64FMULD_0(v) 116 case OpARM64FMULS: 117 return rewriteValueARM64_OpARM64FMULS_0(v) 118 case OpARM64FNEGD: 119 return rewriteValueARM64_OpARM64FNEGD_0(v) 120 case OpARM64FNEGS: 121 return rewriteValueARM64_OpARM64FNEGS_0(v) 122 case OpARM64FNMULD: 123 return rewriteValueARM64_OpARM64FNMULD_0(v) 124 case OpARM64FNMULS: 125 return rewriteValueARM64_OpARM64FNMULS_0(v) 126 case OpARM64FSUBD: 127 return rewriteValueARM64_OpARM64FSUBD_0(v) 128 case OpARM64FSUBS: 129 return rewriteValueARM64_OpARM64FSUBS_0(v) 130 case OpARM64GreaterEqual: 131 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 132 case OpARM64GreaterEqualF: 133 return rewriteValueARM64_OpARM64GreaterEqualF_0(v) 134 case OpARM64GreaterEqualU: 135 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 136 case OpARM64GreaterThan: 137 return rewriteValueARM64_OpARM64GreaterThan_0(v) 138 case OpARM64GreaterThanF: 139 return rewriteValueARM64_OpARM64GreaterThanF_0(v) 140 case OpARM64GreaterThanU: 141 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 142 case OpARM64LessEqual: 143 return rewriteValueARM64_OpARM64LessEqual_0(v) 144 case OpARM64LessEqualF: 145 return rewriteValueARM64_OpARM64LessEqualF_0(v) 146 case OpARM64LessEqualU: 147 return rewriteValueARM64_OpARM64LessEqualU_0(v) 148 case OpARM64LessThan: 149 return rewriteValueARM64_OpARM64LessThan_0(v) 150 case OpARM64LessThanF: 151 return rewriteValueARM64_OpARM64LessThanF_0(v) 152 case OpARM64LessThanU: 153 return rewriteValueARM64_OpARM64LessThanU_0(v) 154 case OpARM64MADD: 155 return rewriteValueARM64_OpARM64MADD_0(v) || 
rewriteValueARM64_OpARM64MADD_10(v) || rewriteValueARM64_OpARM64MADD_20(v) 156 case OpARM64MADDW: 157 return rewriteValueARM64_OpARM64MADDW_0(v) || rewriteValueARM64_OpARM64MADDW_10(v) || rewriteValueARM64_OpARM64MADDW_20(v) 158 case OpARM64MNEG: 159 return rewriteValueARM64_OpARM64MNEG_0(v) || rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v) 160 case OpARM64MNEGW: 161 return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v) 162 case OpARM64MOD: 163 return rewriteValueARM64_OpARM64MOD_0(v) 164 case OpARM64MODW: 165 return rewriteValueARM64_OpARM64MODW_0(v) 166 case OpARM64MOVBUload: 167 return rewriteValueARM64_OpARM64MOVBUload_0(v) 168 case OpARM64MOVBUloadidx: 169 return rewriteValueARM64_OpARM64MOVBUloadidx_0(v) 170 case OpARM64MOVBUreg: 171 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 172 case OpARM64MOVBload: 173 return rewriteValueARM64_OpARM64MOVBload_0(v) 174 case OpARM64MOVBloadidx: 175 return rewriteValueARM64_OpARM64MOVBloadidx_0(v) 176 case OpARM64MOVBreg: 177 return rewriteValueARM64_OpARM64MOVBreg_0(v) 178 case OpARM64MOVBstore: 179 return rewriteValueARM64_OpARM64MOVBstore_0(v) || rewriteValueARM64_OpARM64MOVBstore_10(v) || rewriteValueARM64_OpARM64MOVBstore_20(v) || rewriteValueARM64_OpARM64MOVBstore_30(v) || rewriteValueARM64_OpARM64MOVBstore_40(v) 180 case OpARM64MOVBstoreidx: 181 return rewriteValueARM64_OpARM64MOVBstoreidx_0(v) || rewriteValueARM64_OpARM64MOVBstoreidx_10(v) 182 case OpARM64MOVBstorezero: 183 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 184 case OpARM64MOVBstorezeroidx: 185 return rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v) 186 case OpARM64MOVDload: 187 return rewriteValueARM64_OpARM64MOVDload_0(v) 188 case OpARM64MOVDloadidx: 189 return rewriteValueARM64_OpARM64MOVDloadidx_0(v) 190 case OpARM64MOVDloadidx8: 191 return rewriteValueARM64_OpARM64MOVDloadidx8_0(v) 192 case OpARM64MOVDreg: 193 return 
rewriteValueARM64_OpARM64MOVDreg_0(v) 194 case OpARM64MOVDstore: 195 return rewriteValueARM64_OpARM64MOVDstore_0(v) 196 case OpARM64MOVDstoreidx: 197 return rewriteValueARM64_OpARM64MOVDstoreidx_0(v) 198 case OpARM64MOVDstoreidx8: 199 return rewriteValueARM64_OpARM64MOVDstoreidx8_0(v) 200 case OpARM64MOVDstorezero: 201 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 202 case OpARM64MOVDstorezeroidx: 203 return rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v) 204 case OpARM64MOVDstorezeroidx8: 205 return rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v) 206 case OpARM64MOVHUload: 207 return rewriteValueARM64_OpARM64MOVHUload_0(v) 208 case OpARM64MOVHUloadidx: 209 return rewriteValueARM64_OpARM64MOVHUloadidx_0(v) 210 case OpARM64MOVHUloadidx2: 211 return rewriteValueARM64_OpARM64MOVHUloadidx2_0(v) 212 case OpARM64MOVHUreg: 213 return rewriteValueARM64_OpARM64MOVHUreg_0(v) || rewriteValueARM64_OpARM64MOVHUreg_10(v) 214 case OpARM64MOVHload: 215 return rewriteValueARM64_OpARM64MOVHload_0(v) 216 case OpARM64MOVHloadidx: 217 return rewriteValueARM64_OpARM64MOVHloadidx_0(v) 218 case OpARM64MOVHloadidx2: 219 return rewriteValueARM64_OpARM64MOVHloadidx2_0(v) 220 case OpARM64MOVHreg: 221 return rewriteValueARM64_OpARM64MOVHreg_0(v) || rewriteValueARM64_OpARM64MOVHreg_10(v) 222 case OpARM64MOVHstore: 223 return rewriteValueARM64_OpARM64MOVHstore_0(v) || rewriteValueARM64_OpARM64MOVHstore_10(v) || rewriteValueARM64_OpARM64MOVHstore_20(v) 224 case OpARM64MOVHstoreidx: 225 return rewriteValueARM64_OpARM64MOVHstoreidx_0(v) || rewriteValueARM64_OpARM64MOVHstoreidx_10(v) 226 case OpARM64MOVHstoreidx2: 227 return rewriteValueARM64_OpARM64MOVHstoreidx2_0(v) 228 case OpARM64MOVHstorezero: 229 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 230 case OpARM64MOVHstorezeroidx: 231 return rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v) 232 case OpARM64MOVHstorezeroidx2: 233 return rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v) 234 case OpARM64MOVQstorezero: 235 return 
rewriteValueARM64_OpARM64MOVQstorezero_0(v) 236 case OpARM64MOVWUload: 237 return rewriteValueARM64_OpARM64MOVWUload_0(v) 238 case OpARM64MOVWUloadidx: 239 return rewriteValueARM64_OpARM64MOVWUloadidx_0(v) 240 case OpARM64MOVWUloadidx4: 241 return rewriteValueARM64_OpARM64MOVWUloadidx4_0(v) 242 case OpARM64MOVWUreg: 243 return rewriteValueARM64_OpARM64MOVWUreg_0(v) || rewriteValueARM64_OpARM64MOVWUreg_10(v) 244 case OpARM64MOVWload: 245 return rewriteValueARM64_OpARM64MOVWload_0(v) 246 case OpARM64MOVWloadidx: 247 return rewriteValueARM64_OpARM64MOVWloadidx_0(v) 248 case OpARM64MOVWloadidx4: 249 return rewriteValueARM64_OpARM64MOVWloadidx4_0(v) 250 case OpARM64MOVWreg: 251 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 252 case OpARM64MOVWstore: 253 return rewriteValueARM64_OpARM64MOVWstore_0(v) || rewriteValueARM64_OpARM64MOVWstore_10(v) 254 case OpARM64MOVWstoreidx: 255 return rewriteValueARM64_OpARM64MOVWstoreidx_0(v) 256 case OpARM64MOVWstoreidx4: 257 return rewriteValueARM64_OpARM64MOVWstoreidx4_0(v) 258 case OpARM64MOVWstorezero: 259 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 260 case OpARM64MOVWstorezeroidx: 261 return rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v) 262 case OpARM64MOVWstorezeroidx4: 263 return rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v) 264 case OpARM64MSUB: 265 return rewriteValueARM64_OpARM64MSUB_0(v) || rewriteValueARM64_OpARM64MSUB_10(v) || rewriteValueARM64_OpARM64MSUB_20(v) 266 case OpARM64MSUBW: 267 return rewriteValueARM64_OpARM64MSUBW_0(v) || rewriteValueARM64_OpARM64MSUBW_10(v) || rewriteValueARM64_OpARM64MSUBW_20(v) 268 case OpARM64MUL: 269 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 270 case OpARM64MULW: 271 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 272 case OpARM64MVN: 273 return rewriteValueARM64_OpARM64MVN_0(v) 274 case 
OpARM64MVNshiftLL: 275 return rewriteValueARM64_OpARM64MVNshiftLL_0(v) 276 case OpARM64MVNshiftRA: 277 return rewriteValueARM64_OpARM64MVNshiftRA_0(v) 278 case OpARM64MVNshiftRL: 279 return rewriteValueARM64_OpARM64MVNshiftRL_0(v) 280 case OpARM64NEG: 281 return rewriteValueARM64_OpARM64NEG_0(v) 282 case OpARM64NEGshiftLL: 283 return rewriteValueARM64_OpARM64NEGshiftLL_0(v) 284 case OpARM64NEGshiftRA: 285 return rewriteValueARM64_OpARM64NEGshiftRA_0(v) 286 case OpARM64NEGshiftRL: 287 return rewriteValueARM64_OpARM64NEGshiftRL_0(v) 288 case OpARM64NotEqual: 289 return rewriteValueARM64_OpARM64NotEqual_0(v) 290 case OpARM64OR: 291 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) || rewriteValueARM64_OpARM64OR_20(v) || rewriteValueARM64_OpARM64OR_30(v) || rewriteValueARM64_OpARM64OR_40(v) 292 case OpARM64ORN: 293 return rewriteValueARM64_OpARM64ORN_0(v) 294 case OpARM64ORNshiftLL: 295 return rewriteValueARM64_OpARM64ORNshiftLL_0(v) 296 case OpARM64ORNshiftRA: 297 return rewriteValueARM64_OpARM64ORNshiftRA_0(v) 298 case OpARM64ORNshiftRL: 299 return rewriteValueARM64_OpARM64ORNshiftRL_0(v) 300 case OpARM64ORconst: 301 return rewriteValueARM64_OpARM64ORconst_0(v) 302 case OpARM64ORshiftLL: 303 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) || rewriteValueARM64_OpARM64ORshiftLL_20(v) 304 case OpARM64ORshiftRA: 305 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 306 case OpARM64ORshiftRL: 307 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 308 case OpARM64RORWconst: 309 return rewriteValueARM64_OpARM64RORWconst_0(v) 310 case OpARM64RORconst: 311 return rewriteValueARM64_OpARM64RORconst_0(v) 312 case OpARM64SBCSflags: 313 return rewriteValueARM64_OpARM64SBCSflags_0(v) 314 case OpARM64SLL: 315 return rewriteValueARM64_OpARM64SLL_0(v) 316 case OpARM64SLLconst: 317 return rewriteValueARM64_OpARM64SLLconst_0(v) 318 case OpARM64SRA: 319 return rewriteValueARM64_OpARM64SRA_0(v) 320 case OpARM64SRAconst: 321 
return rewriteValueARM64_OpARM64SRAconst_0(v) 322 case OpARM64SRL: 323 return rewriteValueARM64_OpARM64SRL_0(v) 324 case OpARM64SRLconst: 325 return rewriteValueARM64_OpARM64SRLconst_0(v) || rewriteValueARM64_OpARM64SRLconst_10(v) 326 case OpARM64STP: 327 return rewriteValueARM64_OpARM64STP_0(v) 328 case OpARM64SUB: 329 return rewriteValueARM64_OpARM64SUB_0(v) || rewriteValueARM64_OpARM64SUB_10(v) 330 case OpARM64SUBconst: 331 return rewriteValueARM64_OpARM64SUBconst_0(v) 332 case OpARM64SUBshiftLL: 333 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 334 case OpARM64SUBshiftRA: 335 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 336 case OpARM64SUBshiftRL: 337 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 338 case OpARM64TST: 339 return rewriteValueARM64_OpARM64TST_0(v) 340 case OpARM64TSTW: 341 return rewriteValueARM64_OpARM64TSTW_0(v) 342 case OpARM64TSTWconst: 343 return rewriteValueARM64_OpARM64TSTWconst_0(v) 344 case OpARM64TSTconst: 345 return rewriteValueARM64_OpARM64TSTconst_0(v) 346 case OpARM64TSTshiftLL: 347 return rewriteValueARM64_OpARM64TSTshiftLL_0(v) 348 case OpARM64TSTshiftRA: 349 return rewriteValueARM64_OpARM64TSTshiftRA_0(v) 350 case OpARM64TSTshiftRL: 351 return rewriteValueARM64_OpARM64TSTshiftRL_0(v) 352 case OpARM64UBFIZ: 353 return rewriteValueARM64_OpARM64UBFIZ_0(v) 354 case OpARM64UBFX: 355 return rewriteValueARM64_OpARM64UBFX_0(v) 356 case OpARM64UDIV: 357 return rewriteValueARM64_OpARM64UDIV_0(v) 358 case OpARM64UDIVW: 359 return rewriteValueARM64_OpARM64UDIVW_0(v) 360 case OpARM64UMOD: 361 return rewriteValueARM64_OpARM64UMOD_0(v) 362 case OpARM64UMODW: 363 return rewriteValueARM64_OpARM64UMODW_0(v) 364 case OpARM64XOR: 365 return rewriteValueARM64_OpARM64XOR_0(v) || rewriteValueARM64_OpARM64XOR_10(v) 366 case OpARM64XORconst: 367 return rewriteValueARM64_OpARM64XORconst_0(v) 368 case OpARM64XORshiftLL: 369 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 370 case OpARM64XORshiftRA: 371 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 
372 case OpARM64XORshiftRL: 373 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 374 case OpAbs: 375 return rewriteValueARM64_OpAbs_0(v) 376 case OpAdd16: 377 return rewriteValueARM64_OpAdd16_0(v) 378 case OpAdd32: 379 return rewriteValueARM64_OpAdd32_0(v) 380 case OpAdd32F: 381 return rewriteValueARM64_OpAdd32F_0(v) 382 case OpAdd64: 383 return rewriteValueARM64_OpAdd64_0(v) 384 case OpAdd64F: 385 return rewriteValueARM64_OpAdd64F_0(v) 386 case OpAdd8: 387 return rewriteValueARM64_OpAdd8_0(v) 388 case OpAddPtr: 389 return rewriteValueARM64_OpAddPtr_0(v) 390 case OpAddr: 391 return rewriteValueARM64_OpAddr_0(v) 392 case OpAnd16: 393 return rewriteValueARM64_OpAnd16_0(v) 394 case OpAnd32: 395 return rewriteValueARM64_OpAnd32_0(v) 396 case OpAnd64: 397 return rewriteValueARM64_OpAnd64_0(v) 398 case OpAnd8: 399 return rewriteValueARM64_OpAnd8_0(v) 400 case OpAndB: 401 return rewriteValueARM64_OpAndB_0(v) 402 case OpAtomicAdd32: 403 return rewriteValueARM64_OpAtomicAdd32_0(v) 404 case OpAtomicAdd32Variant: 405 return rewriteValueARM64_OpAtomicAdd32Variant_0(v) 406 case OpAtomicAdd64: 407 return rewriteValueARM64_OpAtomicAdd64_0(v) 408 case OpAtomicAdd64Variant: 409 return rewriteValueARM64_OpAtomicAdd64Variant_0(v) 410 case OpAtomicAnd8: 411 return rewriteValueARM64_OpAtomicAnd8_0(v) 412 case OpAtomicCompareAndSwap32: 413 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 414 case OpAtomicCompareAndSwap64: 415 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 416 case OpAtomicExchange32: 417 return rewriteValueARM64_OpAtomicExchange32_0(v) 418 case OpAtomicExchange64: 419 return rewriteValueARM64_OpAtomicExchange64_0(v) 420 case OpAtomicLoad32: 421 return rewriteValueARM64_OpAtomicLoad32_0(v) 422 case OpAtomicLoad64: 423 return rewriteValueARM64_OpAtomicLoad64_0(v) 424 case OpAtomicLoad8: 425 return rewriteValueARM64_OpAtomicLoad8_0(v) 426 case OpAtomicLoadPtr: 427 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 428 case OpAtomicOr8: 429 return 
rewriteValueARM64_OpAtomicOr8_0(v) 430 case OpAtomicStore32: 431 return rewriteValueARM64_OpAtomicStore32_0(v) 432 case OpAtomicStore64: 433 return rewriteValueARM64_OpAtomicStore64_0(v) 434 case OpAtomicStore8: 435 return rewriteValueARM64_OpAtomicStore8_0(v) 436 case OpAtomicStorePtrNoWB: 437 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 438 case OpAvg64u: 439 return rewriteValueARM64_OpAvg64u_0(v) 440 case OpBitLen32: 441 return rewriteValueARM64_OpBitLen32_0(v) 442 case OpBitLen64: 443 return rewriteValueARM64_OpBitLen64_0(v) 444 case OpBitRev16: 445 return rewriteValueARM64_OpBitRev16_0(v) 446 case OpBitRev32: 447 return rewriteValueARM64_OpBitRev32_0(v) 448 case OpBitRev64: 449 return rewriteValueARM64_OpBitRev64_0(v) 450 case OpBitRev8: 451 return rewriteValueARM64_OpBitRev8_0(v) 452 case OpBswap32: 453 return rewriteValueARM64_OpBswap32_0(v) 454 case OpBswap64: 455 return rewriteValueARM64_OpBswap64_0(v) 456 case OpCeil: 457 return rewriteValueARM64_OpCeil_0(v) 458 case OpClosureCall: 459 return rewriteValueARM64_OpClosureCall_0(v) 460 case OpCom16: 461 return rewriteValueARM64_OpCom16_0(v) 462 case OpCom32: 463 return rewriteValueARM64_OpCom32_0(v) 464 case OpCom64: 465 return rewriteValueARM64_OpCom64_0(v) 466 case OpCom8: 467 return rewriteValueARM64_OpCom8_0(v) 468 case OpCondSelect: 469 return rewriteValueARM64_OpCondSelect_0(v) 470 case OpConst16: 471 return rewriteValueARM64_OpConst16_0(v) 472 case OpConst32: 473 return rewriteValueARM64_OpConst32_0(v) 474 case OpConst32F: 475 return rewriteValueARM64_OpConst32F_0(v) 476 case OpConst64: 477 return rewriteValueARM64_OpConst64_0(v) 478 case OpConst64F: 479 return rewriteValueARM64_OpConst64F_0(v) 480 case OpConst8: 481 return rewriteValueARM64_OpConst8_0(v) 482 case OpConstBool: 483 return rewriteValueARM64_OpConstBool_0(v) 484 case OpConstNil: 485 return rewriteValueARM64_OpConstNil_0(v) 486 case OpCtz16: 487 return rewriteValueARM64_OpCtz16_0(v) 488 case OpCtz16NonZero: 489 return 
rewriteValueARM64_OpCtz16NonZero_0(v) 490 case OpCtz32: 491 return rewriteValueARM64_OpCtz32_0(v) 492 case OpCtz32NonZero: 493 return rewriteValueARM64_OpCtz32NonZero_0(v) 494 case OpCtz64: 495 return rewriteValueARM64_OpCtz64_0(v) 496 case OpCtz64NonZero: 497 return rewriteValueARM64_OpCtz64NonZero_0(v) 498 case OpCtz8: 499 return rewriteValueARM64_OpCtz8_0(v) 500 case OpCtz8NonZero: 501 return rewriteValueARM64_OpCtz8NonZero_0(v) 502 case OpCvt32Fto32: 503 return rewriteValueARM64_OpCvt32Fto32_0(v) 504 case OpCvt32Fto32U: 505 return rewriteValueARM64_OpCvt32Fto32U_0(v) 506 case OpCvt32Fto64: 507 return rewriteValueARM64_OpCvt32Fto64_0(v) 508 case OpCvt32Fto64F: 509 return rewriteValueARM64_OpCvt32Fto64F_0(v) 510 case OpCvt32Fto64U: 511 return rewriteValueARM64_OpCvt32Fto64U_0(v) 512 case OpCvt32Uto32F: 513 return rewriteValueARM64_OpCvt32Uto32F_0(v) 514 case OpCvt32Uto64F: 515 return rewriteValueARM64_OpCvt32Uto64F_0(v) 516 case OpCvt32to32F: 517 return rewriteValueARM64_OpCvt32to32F_0(v) 518 case OpCvt32to64F: 519 return rewriteValueARM64_OpCvt32to64F_0(v) 520 case OpCvt64Fto32: 521 return rewriteValueARM64_OpCvt64Fto32_0(v) 522 case OpCvt64Fto32F: 523 return rewriteValueARM64_OpCvt64Fto32F_0(v) 524 case OpCvt64Fto32U: 525 return rewriteValueARM64_OpCvt64Fto32U_0(v) 526 case OpCvt64Fto64: 527 return rewriteValueARM64_OpCvt64Fto64_0(v) 528 case OpCvt64Fto64U: 529 return rewriteValueARM64_OpCvt64Fto64U_0(v) 530 case OpCvt64Uto32F: 531 return rewriteValueARM64_OpCvt64Uto32F_0(v) 532 case OpCvt64Uto64F: 533 return rewriteValueARM64_OpCvt64Uto64F_0(v) 534 case OpCvt64to32F: 535 return rewriteValueARM64_OpCvt64to32F_0(v) 536 case OpCvt64to64F: 537 return rewriteValueARM64_OpCvt64to64F_0(v) 538 case OpDiv16: 539 return rewriteValueARM64_OpDiv16_0(v) 540 case OpDiv16u: 541 return rewriteValueARM64_OpDiv16u_0(v) 542 case OpDiv32: 543 return rewriteValueARM64_OpDiv32_0(v) 544 case OpDiv32F: 545 return rewriteValueARM64_OpDiv32F_0(v) 546 case OpDiv32u: 547 return 
rewriteValueARM64_OpDiv32u_0(v) 548 case OpDiv64: 549 return rewriteValueARM64_OpDiv64_0(v) 550 case OpDiv64F: 551 return rewriteValueARM64_OpDiv64F_0(v) 552 case OpDiv64u: 553 return rewriteValueARM64_OpDiv64u_0(v) 554 case OpDiv8: 555 return rewriteValueARM64_OpDiv8_0(v) 556 case OpDiv8u: 557 return rewriteValueARM64_OpDiv8u_0(v) 558 case OpEq16: 559 return rewriteValueARM64_OpEq16_0(v) 560 case OpEq32: 561 return rewriteValueARM64_OpEq32_0(v) 562 case OpEq32F: 563 return rewriteValueARM64_OpEq32F_0(v) 564 case OpEq64: 565 return rewriteValueARM64_OpEq64_0(v) 566 case OpEq64F: 567 return rewriteValueARM64_OpEq64F_0(v) 568 case OpEq8: 569 return rewriteValueARM64_OpEq8_0(v) 570 case OpEqB: 571 return rewriteValueARM64_OpEqB_0(v) 572 case OpEqPtr: 573 return rewriteValueARM64_OpEqPtr_0(v) 574 case OpFMA: 575 return rewriteValueARM64_OpFMA_0(v) 576 case OpFloor: 577 return rewriteValueARM64_OpFloor_0(v) 578 case OpGeq16: 579 return rewriteValueARM64_OpGeq16_0(v) 580 case OpGeq16U: 581 return rewriteValueARM64_OpGeq16U_0(v) 582 case OpGeq32: 583 return rewriteValueARM64_OpGeq32_0(v) 584 case OpGeq32F: 585 return rewriteValueARM64_OpGeq32F_0(v) 586 case OpGeq32U: 587 return rewriteValueARM64_OpGeq32U_0(v) 588 case OpGeq64: 589 return rewriteValueARM64_OpGeq64_0(v) 590 case OpGeq64F: 591 return rewriteValueARM64_OpGeq64F_0(v) 592 case OpGeq64U: 593 return rewriteValueARM64_OpGeq64U_0(v) 594 case OpGeq8: 595 return rewriteValueARM64_OpGeq8_0(v) 596 case OpGeq8U: 597 return rewriteValueARM64_OpGeq8U_0(v) 598 case OpGetCallerPC: 599 return rewriteValueARM64_OpGetCallerPC_0(v) 600 case OpGetCallerSP: 601 return rewriteValueARM64_OpGetCallerSP_0(v) 602 case OpGetClosurePtr: 603 return rewriteValueARM64_OpGetClosurePtr_0(v) 604 case OpGreater16: 605 return rewriteValueARM64_OpGreater16_0(v) 606 case OpGreater16U: 607 return rewriteValueARM64_OpGreater16U_0(v) 608 case OpGreater32: 609 return rewriteValueARM64_OpGreater32_0(v) 610 case OpGreater32F: 611 return 
rewriteValueARM64_OpGreater32F_0(v) 612 case OpGreater32U: 613 return rewriteValueARM64_OpGreater32U_0(v) 614 case OpGreater64: 615 return rewriteValueARM64_OpGreater64_0(v) 616 case OpGreater64F: 617 return rewriteValueARM64_OpGreater64F_0(v) 618 case OpGreater64U: 619 return rewriteValueARM64_OpGreater64U_0(v) 620 case OpGreater8: 621 return rewriteValueARM64_OpGreater8_0(v) 622 case OpGreater8U: 623 return rewriteValueARM64_OpGreater8U_0(v) 624 case OpHmul32: 625 return rewriteValueARM64_OpHmul32_0(v) 626 case OpHmul32u: 627 return rewriteValueARM64_OpHmul32u_0(v) 628 case OpHmul64: 629 return rewriteValueARM64_OpHmul64_0(v) 630 case OpHmul64u: 631 return rewriteValueARM64_OpHmul64u_0(v) 632 case OpInterCall: 633 return rewriteValueARM64_OpInterCall_0(v) 634 case OpIsInBounds: 635 return rewriteValueARM64_OpIsInBounds_0(v) 636 case OpIsNonNil: 637 return rewriteValueARM64_OpIsNonNil_0(v) 638 case OpIsSliceInBounds: 639 return rewriteValueARM64_OpIsSliceInBounds_0(v) 640 case OpLeq16: 641 return rewriteValueARM64_OpLeq16_0(v) 642 case OpLeq16U: 643 return rewriteValueARM64_OpLeq16U_0(v) 644 case OpLeq32: 645 return rewriteValueARM64_OpLeq32_0(v) 646 case OpLeq32F: 647 return rewriteValueARM64_OpLeq32F_0(v) 648 case OpLeq32U: 649 return rewriteValueARM64_OpLeq32U_0(v) 650 case OpLeq64: 651 return rewriteValueARM64_OpLeq64_0(v) 652 case OpLeq64F: 653 return rewriteValueARM64_OpLeq64F_0(v) 654 case OpLeq64U: 655 return rewriteValueARM64_OpLeq64U_0(v) 656 case OpLeq8: 657 return rewriteValueARM64_OpLeq8_0(v) 658 case OpLeq8U: 659 return rewriteValueARM64_OpLeq8U_0(v) 660 case OpLess16: 661 return rewriteValueARM64_OpLess16_0(v) 662 case OpLess16U: 663 return rewriteValueARM64_OpLess16U_0(v) 664 case OpLess32: 665 return rewriteValueARM64_OpLess32_0(v) 666 case OpLess32F: 667 return rewriteValueARM64_OpLess32F_0(v) 668 case OpLess32U: 669 return rewriteValueARM64_OpLess32U_0(v) 670 case OpLess64: 671 return rewriteValueARM64_OpLess64_0(v) 672 case OpLess64F: 673 
return rewriteValueARM64_OpLess64F_0(v) 674 case OpLess64U: 675 return rewriteValueARM64_OpLess64U_0(v) 676 case OpLess8: 677 return rewriteValueARM64_OpLess8_0(v) 678 case OpLess8U: 679 return rewriteValueARM64_OpLess8U_0(v) 680 case OpLoad: 681 return rewriteValueARM64_OpLoad_0(v) 682 case OpLocalAddr: 683 return rewriteValueARM64_OpLocalAddr_0(v) 684 case OpLsh16x16: 685 return rewriteValueARM64_OpLsh16x16_0(v) 686 case OpLsh16x32: 687 return rewriteValueARM64_OpLsh16x32_0(v) 688 case OpLsh16x64: 689 return rewriteValueARM64_OpLsh16x64_0(v) 690 case OpLsh16x8: 691 return rewriteValueARM64_OpLsh16x8_0(v) 692 case OpLsh32x16: 693 return rewriteValueARM64_OpLsh32x16_0(v) 694 case OpLsh32x32: 695 return rewriteValueARM64_OpLsh32x32_0(v) 696 case OpLsh32x64: 697 return rewriteValueARM64_OpLsh32x64_0(v) 698 case OpLsh32x8: 699 return rewriteValueARM64_OpLsh32x8_0(v) 700 case OpLsh64x16: 701 return rewriteValueARM64_OpLsh64x16_0(v) 702 case OpLsh64x32: 703 return rewriteValueARM64_OpLsh64x32_0(v) 704 case OpLsh64x64: 705 return rewriteValueARM64_OpLsh64x64_0(v) 706 case OpLsh64x8: 707 return rewriteValueARM64_OpLsh64x8_0(v) 708 case OpLsh8x16: 709 return rewriteValueARM64_OpLsh8x16_0(v) 710 case OpLsh8x32: 711 return rewriteValueARM64_OpLsh8x32_0(v) 712 case OpLsh8x64: 713 return rewriteValueARM64_OpLsh8x64_0(v) 714 case OpLsh8x8: 715 return rewriteValueARM64_OpLsh8x8_0(v) 716 case OpMod16: 717 return rewriteValueARM64_OpMod16_0(v) 718 case OpMod16u: 719 return rewriteValueARM64_OpMod16u_0(v) 720 case OpMod32: 721 return rewriteValueARM64_OpMod32_0(v) 722 case OpMod32u: 723 return rewriteValueARM64_OpMod32u_0(v) 724 case OpMod64: 725 return rewriteValueARM64_OpMod64_0(v) 726 case OpMod64u: 727 return rewriteValueARM64_OpMod64u_0(v) 728 case OpMod8: 729 return rewriteValueARM64_OpMod8_0(v) 730 case OpMod8u: 731 return rewriteValueARM64_OpMod8u_0(v) 732 case OpMove: 733 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 734 case OpMul16: 735 return 
rewriteValueARM64_OpMul16_0(v) 736 case OpMul32: 737 return rewriteValueARM64_OpMul32_0(v) 738 case OpMul32F: 739 return rewriteValueARM64_OpMul32F_0(v) 740 case OpMul64: 741 return rewriteValueARM64_OpMul64_0(v) 742 case OpMul64F: 743 return rewriteValueARM64_OpMul64F_0(v) 744 case OpMul64uhilo: 745 return rewriteValueARM64_OpMul64uhilo_0(v) 746 case OpMul8: 747 return rewriteValueARM64_OpMul8_0(v) 748 case OpNeg16: 749 return rewriteValueARM64_OpNeg16_0(v) 750 case OpNeg32: 751 return rewriteValueARM64_OpNeg32_0(v) 752 case OpNeg32F: 753 return rewriteValueARM64_OpNeg32F_0(v) 754 case OpNeg64: 755 return rewriteValueARM64_OpNeg64_0(v) 756 case OpNeg64F: 757 return rewriteValueARM64_OpNeg64F_0(v) 758 case OpNeg8: 759 return rewriteValueARM64_OpNeg8_0(v) 760 case OpNeq16: 761 return rewriteValueARM64_OpNeq16_0(v) 762 case OpNeq32: 763 return rewriteValueARM64_OpNeq32_0(v) 764 case OpNeq32F: 765 return rewriteValueARM64_OpNeq32F_0(v) 766 case OpNeq64: 767 return rewriteValueARM64_OpNeq64_0(v) 768 case OpNeq64F: 769 return rewriteValueARM64_OpNeq64F_0(v) 770 case OpNeq8: 771 return rewriteValueARM64_OpNeq8_0(v) 772 case OpNeqB: 773 return rewriteValueARM64_OpNeqB_0(v) 774 case OpNeqPtr: 775 return rewriteValueARM64_OpNeqPtr_0(v) 776 case OpNilCheck: 777 return rewriteValueARM64_OpNilCheck_0(v) 778 case OpNot: 779 return rewriteValueARM64_OpNot_0(v) 780 case OpOffPtr: 781 return rewriteValueARM64_OpOffPtr_0(v) 782 case OpOr16: 783 return rewriteValueARM64_OpOr16_0(v) 784 case OpOr32: 785 return rewriteValueARM64_OpOr32_0(v) 786 case OpOr64: 787 return rewriteValueARM64_OpOr64_0(v) 788 case OpOr8: 789 return rewriteValueARM64_OpOr8_0(v) 790 case OpOrB: 791 return rewriteValueARM64_OpOrB_0(v) 792 case OpPanicBounds: 793 return rewriteValueARM64_OpPanicBounds_0(v) 794 case OpPopCount16: 795 return rewriteValueARM64_OpPopCount16_0(v) 796 case OpPopCount32: 797 return rewriteValueARM64_OpPopCount32_0(v) 798 case OpPopCount64: 799 return rewriteValueARM64_OpPopCount64_0(v) 
800 case OpRotateLeft16: 801 return rewriteValueARM64_OpRotateLeft16_0(v) 802 case OpRotateLeft32: 803 return rewriteValueARM64_OpRotateLeft32_0(v) 804 case OpRotateLeft64: 805 return rewriteValueARM64_OpRotateLeft64_0(v) 806 case OpRotateLeft8: 807 return rewriteValueARM64_OpRotateLeft8_0(v) 808 case OpRound: 809 return rewriteValueARM64_OpRound_0(v) 810 case OpRound32F: 811 return rewriteValueARM64_OpRound32F_0(v) 812 case OpRound64F: 813 return rewriteValueARM64_OpRound64F_0(v) 814 case OpRoundToEven: 815 return rewriteValueARM64_OpRoundToEven_0(v) 816 case OpRsh16Ux16: 817 return rewriteValueARM64_OpRsh16Ux16_0(v) 818 case OpRsh16Ux32: 819 return rewriteValueARM64_OpRsh16Ux32_0(v) 820 case OpRsh16Ux64: 821 return rewriteValueARM64_OpRsh16Ux64_0(v) 822 case OpRsh16Ux8: 823 return rewriteValueARM64_OpRsh16Ux8_0(v) 824 case OpRsh16x16: 825 return rewriteValueARM64_OpRsh16x16_0(v) 826 case OpRsh16x32: 827 return rewriteValueARM64_OpRsh16x32_0(v) 828 case OpRsh16x64: 829 return rewriteValueARM64_OpRsh16x64_0(v) 830 case OpRsh16x8: 831 return rewriteValueARM64_OpRsh16x8_0(v) 832 case OpRsh32Ux16: 833 return rewriteValueARM64_OpRsh32Ux16_0(v) 834 case OpRsh32Ux32: 835 return rewriteValueARM64_OpRsh32Ux32_0(v) 836 case OpRsh32Ux64: 837 return rewriteValueARM64_OpRsh32Ux64_0(v) 838 case OpRsh32Ux8: 839 return rewriteValueARM64_OpRsh32Ux8_0(v) 840 case OpRsh32x16: 841 return rewriteValueARM64_OpRsh32x16_0(v) 842 case OpRsh32x32: 843 return rewriteValueARM64_OpRsh32x32_0(v) 844 case OpRsh32x64: 845 return rewriteValueARM64_OpRsh32x64_0(v) 846 case OpRsh32x8: 847 return rewriteValueARM64_OpRsh32x8_0(v) 848 case OpRsh64Ux16: 849 return rewriteValueARM64_OpRsh64Ux16_0(v) 850 case OpRsh64Ux32: 851 return rewriteValueARM64_OpRsh64Ux32_0(v) 852 case OpRsh64Ux64: 853 return rewriteValueARM64_OpRsh64Ux64_0(v) 854 case OpRsh64Ux8: 855 return rewriteValueARM64_OpRsh64Ux8_0(v) 856 case OpRsh64x16: 857 return rewriteValueARM64_OpRsh64x16_0(v) 858 case OpRsh64x32: 859 return 
rewriteValueARM64_OpRsh64x32_0(v) 860 case OpRsh64x64: 861 return rewriteValueARM64_OpRsh64x64_0(v) 862 case OpRsh64x8: 863 return rewriteValueARM64_OpRsh64x8_0(v) 864 case OpRsh8Ux16: 865 return rewriteValueARM64_OpRsh8Ux16_0(v) 866 case OpRsh8Ux32: 867 return rewriteValueARM64_OpRsh8Ux32_0(v) 868 case OpRsh8Ux64: 869 return rewriteValueARM64_OpRsh8Ux64_0(v) 870 case OpRsh8Ux8: 871 return rewriteValueARM64_OpRsh8Ux8_0(v) 872 case OpRsh8x16: 873 return rewriteValueARM64_OpRsh8x16_0(v) 874 case OpRsh8x32: 875 return rewriteValueARM64_OpRsh8x32_0(v) 876 case OpRsh8x64: 877 return rewriteValueARM64_OpRsh8x64_0(v) 878 case OpRsh8x8: 879 return rewriteValueARM64_OpRsh8x8_0(v) 880 case OpSelect0: 881 return rewriteValueARM64_OpSelect0_0(v) 882 case OpSelect1: 883 return rewriteValueARM64_OpSelect1_0(v) 884 case OpSignExt16to32: 885 return rewriteValueARM64_OpSignExt16to32_0(v) 886 case OpSignExt16to64: 887 return rewriteValueARM64_OpSignExt16to64_0(v) 888 case OpSignExt32to64: 889 return rewriteValueARM64_OpSignExt32to64_0(v) 890 case OpSignExt8to16: 891 return rewriteValueARM64_OpSignExt8to16_0(v) 892 case OpSignExt8to32: 893 return rewriteValueARM64_OpSignExt8to32_0(v) 894 case OpSignExt8to64: 895 return rewriteValueARM64_OpSignExt8to64_0(v) 896 case OpSlicemask: 897 return rewriteValueARM64_OpSlicemask_0(v) 898 case OpSqrt: 899 return rewriteValueARM64_OpSqrt_0(v) 900 case OpStaticCall: 901 return rewriteValueARM64_OpStaticCall_0(v) 902 case OpStore: 903 return rewriteValueARM64_OpStore_0(v) 904 case OpSub16: 905 return rewriteValueARM64_OpSub16_0(v) 906 case OpSub32: 907 return rewriteValueARM64_OpSub32_0(v) 908 case OpSub32F: 909 return rewriteValueARM64_OpSub32F_0(v) 910 case OpSub64: 911 return rewriteValueARM64_OpSub64_0(v) 912 case OpSub64F: 913 return rewriteValueARM64_OpSub64F_0(v) 914 case OpSub8: 915 return rewriteValueARM64_OpSub8_0(v) 916 case OpSubPtr: 917 return rewriteValueARM64_OpSubPtr_0(v) 918 case OpTrunc: 919 return rewriteValueARM64_OpTrunc_0(v) 
920 case OpTrunc16to8: 921 return rewriteValueARM64_OpTrunc16to8_0(v) 922 case OpTrunc32to16: 923 return rewriteValueARM64_OpTrunc32to16_0(v) 924 case OpTrunc32to8: 925 return rewriteValueARM64_OpTrunc32to8_0(v) 926 case OpTrunc64to16: 927 return rewriteValueARM64_OpTrunc64to16_0(v) 928 case OpTrunc64to32: 929 return rewriteValueARM64_OpTrunc64to32_0(v) 930 case OpTrunc64to8: 931 return rewriteValueARM64_OpTrunc64to8_0(v) 932 case OpWB: 933 return rewriteValueARM64_OpWB_0(v) 934 case OpXor16: 935 return rewriteValueARM64_OpXor16_0(v) 936 case OpXor32: 937 return rewriteValueARM64_OpXor32_0(v) 938 case OpXor64: 939 return rewriteValueARM64_OpXor64_0(v) 940 case OpXor8: 941 return rewriteValueARM64_OpXor8_0(v) 942 case OpZero: 943 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 944 case OpZeroExt16to32: 945 return rewriteValueARM64_OpZeroExt16to32_0(v) 946 case OpZeroExt16to64: 947 return rewriteValueARM64_OpZeroExt16to64_0(v) 948 case OpZeroExt32to64: 949 return rewriteValueARM64_OpZeroExt32to64_0(v) 950 case OpZeroExt8to16: 951 return rewriteValueARM64_OpZeroExt8to16_0(v) 952 case OpZeroExt8to32: 953 return rewriteValueARM64_OpZeroExt8to32_0(v) 954 case OpZeroExt8to64: 955 return rewriteValueARM64_OpZeroExt8to64_0(v) 956 } 957 return false 958 } 959 func rewriteValueARM64_OpARM64ADCSflags_0(v *Value) bool { 960 b := v.Block 961 typ := &b.Func.Config.Types 962 // match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (ADCzerocarry <typ.UInt64> c)))) 963 // result: (ADCSflags x y c) 964 for { 965 _ = v.Args[2] 966 x := v.Args[0] 967 y := v.Args[1] 968 v_2 := v.Args[2] 969 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags { 970 break 971 } 972 v_2_0 := v_2.Args[0] 973 if v_2_0.Op != OpARM64ADDSconstflags || v_2_0.AuxInt != -1 { 974 break 975 } 976 v_2_0_0 := v_2_0.Args[0] 977 if v_2_0_0.Op != OpARM64ADCzerocarry || v_2_0_0.Type != typ.UInt64 { 978 break 979 } 980 c := v_2_0_0.Args[0] 981 
v.reset(OpARM64ADCSflags) 982 v.AddArg(x) 983 v.AddArg(y) 984 v.AddArg(c) 985 return true 986 } 987 // match: (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] (MOVDconst [0])))) 988 // result: (ADDSflags x y) 989 for { 990 _ = v.Args[2] 991 x := v.Args[0] 992 y := v.Args[1] 993 v_2 := v.Args[2] 994 if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags { 995 break 996 } 997 v_2_0 := v_2.Args[0] 998 if v_2_0.Op != OpARM64ADDSconstflags || v_2_0.AuxInt != -1 { 999 break 1000 } 1001 v_2_0_0 := v_2_0.Args[0] 1002 if v_2_0_0.Op != OpARM64MOVDconst || v_2_0_0.AuxInt != 0 { 1003 break 1004 } 1005 v.reset(OpARM64ADDSflags) 1006 v.AddArg(x) 1007 v.AddArg(y) 1008 return true 1009 } 1010 return false 1011 } 1012 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 1013 // match: (ADD x (MOVDconst [c])) 1014 // result: (ADDconst [c] x) 1015 for { 1016 _ = v.Args[1] 1017 x := v.Args[0] 1018 v_1 := v.Args[1] 1019 if v_1.Op != OpARM64MOVDconst { 1020 break 1021 } 1022 c := v_1.AuxInt 1023 v.reset(OpARM64ADDconst) 1024 v.AuxInt = c 1025 v.AddArg(x) 1026 return true 1027 } 1028 // match: (ADD (MOVDconst [c]) x) 1029 // result: (ADDconst [c] x) 1030 for { 1031 x := v.Args[1] 1032 v_0 := v.Args[0] 1033 if v_0.Op != OpARM64MOVDconst { 1034 break 1035 } 1036 c := v_0.AuxInt 1037 v.reset(OpARM64ADDconst) 1038 v.AuxInt = c 1039 v.AddArg(x) 1040 return true 1041 } 1042 // match: (ADD a l:(MUL x y)) 1043 // cond: l.Uses==1 && clobber(l) 1044 // result: (MADD a x y) 1045 for { 1046 _ = v.Args[1] 1047 a := v.Args[0] 1048 l := v.Args[1] 1049 if l.Op != OpARM64MUL { 1050 break 1051 } 1052 y := l.Args[1] 1053 x := l.Args[0] 1054 if !(l.Uses == 1 && clobber(l)) { 1055 break 1056 } 1057 v.reset(OpARM64MADD) 1058 v.AddArg(a) 1059 v.AddArg(x) 1060 v.AddArg(y) 1061 return true 1062 } 1063 // match: (ADD l:(MUL x y) a) 1064 // cond: l.Uses==1 && clobber(l) 1065 // result: (MADD a x y) 1066 for { 1067 a := v.Args[1] 1068 l := v.Args[0] 1069 if l.Op != OpARM64MUL { 1070 break 1071 } 1072 y 
:= l.Args[1] 1073 x := l.Args[0] 1074 if !(l.Uses == 1 && clobber(l)) { 1075 break 1076 } 1077 v.reset(OpARM64MADD) 1078 v.AddArg(a) 1079 v.AddArg(x) 1080 v.AddArg(y) 1081 return true 1082 } 1083 // match: (ADD a l:(MNEG x y)) 1084 // cond: l.Uses==1 && clobber(l) 1085 // result: (MSUB a x y) 1086 for { 1087 _ = v.Args[1] 1088 a := v.Args[0] 1089 l := v.Args[1] 1090 if l.Op != OpARM64MNEG { 1091 break 1092 } 1093 y := l.Args[1] 1094 x := l.Args[0] 1095 if !(l.Uses == 1 && clobber(l)) { 1096 break 1097 } 1098 v.reset(OpARM64MSUB) 1099 v.AddArg(a) 1100 v.AddArg(x) 1101 v.AddArg(y) 1102 return true 1103 } 1104 // match: (ADD l:(MNEG x y) a) 1105 // cond: l.Uses==1 && clobber(l) 1106 // result: (MSUB a x y) 1107 for { 1108 a := v.Args[1] 1109 l := v.Args[0] 1110 if l.Op != OpARM64MNEG { 1111 break 1112 } 1113 y := l.Args[1] 1114 x := l.Args[0] 1115 if !(l.Uses == 1 && clobber(l)) { 1116 break 1117 } 1118 v.reset(OpARM64MSUB) 1119 v.AddArg(a) 1120 v.AddArg(x) 1121 v.AddArg(y) 1122 return true 1123 } 1124 // match: (ADD a l:(MULW x y)) 1125 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1126 // result: (MADDW a x y) 1127 for { 1128 _ = v.Args[1] 1129 a := v.Args[0] 1130 l := v.Args[1] 1131 if l.Op != OpARM64MULW { 1132 break 1133 } 1134 y := l.Args[1] 1135 x := l.Args[0] 1136 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1137 break 1138 } 1139 v.reset(OpARM64MADDW) 1140 v.AddArg(a) 1141 v.AddArg(x) 1142 v.AddArg(y) 1143 return true 1144 } 1145 // match: (ADD l:(MULW x y) a) 1146 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1147 // result: (MADDW a x y) 1148 for { 1149 a := v.Args[1] 1150 l := v.Args[0] 1151 if l.Op != OpARM64MULW { 1152 break 1153 } 1154 y := l.Args[1] 1155 x := l.Args[0] 1156 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1157 break 1158 } 1159 v.reset(OpARM64MADDW) 1160 v.AddArg(a) 1161 v.AddArg(x) 1162 v.AddArg(y) 1163 return true 1164 } 1165 // match: (ADD a l:(MNEGW x y)) 1166 // cond: a.Type.Size() != 8 && 
l.Uses==1 && clobber(l) 1167 // result: (MSUBW a x y) 1168 for { 1169 _ = v.Args[1] 1170 a := v.Args[0] 1171 l := v.Args[1] 1172 if l.Op != OpARM64MNEGW { 1173 break 1174 } 1175 y := l.Args[1] 1176 x := l.Args[0] 1177 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1178 break 1179 } 1180 v.reset(OpARM64MSUBW) 1181 v.AddArg(a) 1182 v.AddArg(x) 1183 v.AddArg(y) 1184 return true 1185 } 1186 // match: (ADD l:(MNEGW x y) a) 1187 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1188 // result: (MSUBW a x y) 1189 for { 1190 a := v.Args[1] 1191 l := v.Args[0] 1192 if l.Op != OpARM64MNEGW { 1193 break 1194 } 1195 y := l.Args[1] 1196 x := l.Args[0] 1197 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1198 break 1199 } 1200 v.reset(OpARM64MSUBW) 1201 v.AddArg(a) 1202 v.AddArg(x) 1203 v.AddArg(y) 1204 return true 1205 } 1206 return false 1207 } 1208 func rewriteValueARM64_OpARM64ADD_10(v *Value) bool { 1209 b := v.Block 1210 typ := &b.Func.Config.Types 1211 // match: (ADD x (NEG y)) 1212 // result: (SUB x y) 1213 for { 1214 _ = v.Args[1] 1215 x := v.Args[0] 1216 v_1 := v.Args[1] 1217 if v_1.Op != OpARM64NEG { 1218 break 1219 } 1220 y := v_1.Args[0] 1221 v.reset(OpARM64SUB) 1222 v.AddArg(x) 1223 v.AddArg(y) 1224 return true 1225 } 1226 // match: (ADD (NEG y) x) 1227 // result: (SUB x y) 1228 for { 1229 x := v.Args[1] 1230 v_0 := v.Args[0] 1231 if v_0.Op != OpARM64NEG { 1232 break 1233 } 1234 y := v_0.Args[0] 1235 v.reset(OpARM64SUB) 1236 v.AddArg(x) 1237 v.AddArg(y) 1238 return true 1239 } 1240 // match: (ADD x0 x1:(SLLconst [c] y)) 1241 // cond: clobberIfDead(x1) 1242 // result: (ADDshiftLL x0 y [c]) 1243 for { 1244 _ = v.Args[1] 1245 x0 := v.Args[0] 1246 x1 := v.Args[1] 1247 if x1.Op != OpARM64SLLconst { 1248 break 1249 } 1250 c := x1.AuxInt 1251 y := x1.Args[0] 1252 if !(clobberIfDead(x1)) { 1253 break 1254 } 1255 v.reset(OpARM64ADDshiftLL) 1256 v.AuxInt = c 1257 v.AddArg(x0) 1258 v.AddArg(y) 1259 return true 1260 } 1261 // match: (ADD x1:(SLLconst [c] 
y) x0) 1262 // cond: clobberIfDead(x1) 1263 // result: (ADDshiftLL x0 y [c]) 1264 for { 1265 x0 := v.Args[1] 1266 x1 := v.Args[0] 1267 if x1.Op != OpARM64SLLconst { 1268 break 1269 } 1270 c := x1.AuxInt 1271 y := x1.Args[0] 1272 if !(clobberIfDead(x1)) { 1273 break 1274 } 1275 v.reset(OpARM64ADDshiftLL) 1276 v.AuxInt = c 1277 v.AddArg(x0) 1278 v.AddArg(y) 1279 return true 1280 } 1281 // match: (ADD x0 x1:(SRLconst [c] y)) 1282 // cond: clobberIfDead(x1) 1283 // result: (ADDshiftRL x0 y [c]) 1284 for { 1285 _ = v.Args[1] 1286 x0 := v.Args[0] 1287 x1 := v.Args[1] 1288 if x1.Op != OpARM64SRLconst { 1289 break 1290 } 1291 c := x1.AuxInt 1292 y := x1.Args[0] 1293 if !(clobberIfDead(x1)) { 1294 break 1295 } 1296 v.reset(OpARM64ADDshiftRL) 1297 v.AuxInt = c 1298 v.AddArg(x0) 1299 v.AddArg(y) 1300 return true 1301 } 1302 // match: (ADD x1:(SRLconst [c] y) x0) 1303 // cond: clobberIfDead(x1) 1304 // result: (ADDshiftRL x0 y [c]) 1305 for { 1306 x0 := v.Args[1] 1307 x1 := v.Args[0] 1308 if x1.Op != OpARM64SRLconst { 1309 break 1310 } 1311 c := x1.AuxInt 1312 y := x1.Args[0] 1313 if !(clobberIfDead(x1)) { 1314 break 1315 } 1316 v.reset(OpARM64ADDshiftRL) 1317 v.AuxInt = c 1318 v.AddArg(x0) 1319 v.AddArg(y) 1320 return true 1321 } 1322 // match: (ADD x0 x1:(SRAconst [c] y)) 1323 // cond: clobberIfDead(x1) 1324 // result: (ADDshiftRA x0 y [c]) 1325 for { 1326 _ = v.Args[1] 1327 x0 := v.Args[0] 1328 x1 := v.Args[1] 1329 if x1.Op != OpARM64SRAconst { 1330 break 1331 } 1332 c := x1.AuxInt 1333 y := x1.Args[0] 1334 if !(clobberIfDead(x1)) { 1335 break 1336 } 1337 v.reset(OpARM64ADDshiftRA) 1338 v.AuxInt = c 1339 v.AddArg(x0) 1340 v.AddArg(y) 1341 return true 1342 } 1343 // match: (ADD x1:(SRAconst [c] y) x0) 1344 // cond: clobberIfDead(x1) 1345 // result: (ADDshiftRA x0 y [c]) 1346 for { 1347 x0 := v.Args[1] 1348 x1 := v.Args[0] 1349 if x1.Op != OpARM64SRAconst { 1350 break 1351 } 1352 c := x1.AuxInt 1353 y := x1.Args[0] 1354 if !(clobberIfDead(x1)) { 1355 break 1356 } 1357 
v.reset(OpARM64ADDshiftRA) 1358 v.AuxInt = c 1359 v.AddArg(x0) 1360 v.AddArg(y) 1361 return true 1362 } 1363 // match: (ADD (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 1364 // cond: cc.(Op) == OpARM64LessThanU 1365 // result: (ROR x (NEG <t> y)) 1366 for { 1367 _ = v.Args[1] 1368 v_0 := v.Args[0] 1369 if v_0.Op != OpARM64SLL { 1370 break 1371 } 1372 _ = v_0.Args[1] 1373 x := v_0.Args[0] 1374 v_0_1 := v_0.Args[1] 1375 if v_0_1.Op != OpARM64ANDconst { 1376 break 1377 } 1378 t := v_0_1.Type 1379 if v_0_1.AuxInt != 63 { 1380 break 1381 } 1382 y := v_0_1.Args[0] 1383 v_1 := v.Args[1] 1384 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 { 1385 break 1386 } 1387 cc := v_1.Aux 1388 _ = v_1.Args[1] 1389 v_1_0 := v_1.Args[0] 1390 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 { 1391 break 1392 } 1393 _ = v_1_0.Args[1] 1394 if x != v_1_0.Args[0] { 1395 break 1396 } 1397 v_1_0_1 := v_1_0.Args[1] 1398 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 1399 break 1400 } 1401 _ = v_1_0_1.Args[1] 1402 v_1_0_1_0 := v_1_0_1.Args[0] 1403 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 { 1404 break 1405 } 1406 v_1_0_1_1 := v_1_0_1.Args[1] 1407 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] { 1408 break 1409 } 1410 v_1_1 := v_1.Args[1] 1411 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 1412 break 1413 } 1414 v_1_1_0 := v_1_1.Args[0] 1415 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 1416 break 1417 } 1418 _ = v_1_1_0.Args[1] 1419 v_1_1_0_0 := v_1_1_0.Args[0] 1420 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 { 1421 break 1422 } 1423 v_1_1_0_1 := v_1_1_0.Args[1] 1424 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1425 break 1426 
} 1427 v.reset(OpARM64ROR) 1428 v.AddArg(x) 1429 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1430 v0.AddArg(y) 1431 v.AddArg(v0) 1432 return true 1433 } 1434 // match: (ADD (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 1435 // cond: cc.(Op) == OpARM64LessThanU 1436 // result: (ROR x (NEG <t> y)) 1437 for { 1438 _ = v.Args[1] 1439 v_0 := v.Args[0] 1440 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 { 1441 break 1442 } 1443 cc := v_0.Aux 1444 _ = v_0.Args[1] 1445 v_0_0 := v_0.Args[0] 1446 if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt64 { 1447 break 1448 } 1449 _ = v_0_0.Args[1] 1450 x := v_0_0.Args[0] 1451 v_0_0_1 := v_0_0.Args[1] 1452 if v_0_0_1.Op != OpARM64SUB { 1453 break 1454 } 1455 t := v_0_0_1.Type 1456 _ = v_0_0_1.Args[1] 1457 v_0_0_1_0 := v_0_0_1.Args[0] 1458 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 { 1459 break 1460 } 1461 v_0_0_1_1 := v_0_0_1.Args[1] 1462 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 { 1463 break 1464 } 1465 y := v_0_0_1_1.Args[0] 1466 v_0_1 := v_0.Args[1] 1467 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 1468 break 1469 } 1470 v_0_1_0 := v_0_1.Args[0] 1471 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 1472 break 1473 } 1474 _ = v_0_1_0.Args[1] 1475 v_0_1_0_0 := v_0_1_0.Args[0] 1476 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 { 1477 break 1478 } 1479 v_0_1_0_1 := v_0_1_0.Args[1] 1480 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] { 1481 break 1482 } 1483 v_1 := v.Args[1] 1484 if v_1.Op != OpARM64SLL { 1485 break 1486 } 1487 _ = v_1.Args[1] 1488 if x != v_1.Args[0] { 1489 break 1490 } 1491 v_1_1 := v_1.Args[1] 1492 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == 
OpARM64LessThanU) { 1493 break 1494 } 1495 v.reset(OpARM64ROR) 1496 v.AddArg(x) 1497 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1498 v0.AddArg(y) 1499 v.AddArg(v0) 1500 return true 1501 } 1502 return false 1503 } 1504 func rewriteValueARM64_OpARM64ADD_20(v *Value) bool { 1505 b := v.Block 1506 typ := &b.Func.Config.Types 1507 // match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 1508 // cond: cc.(Op) == OpARM64LessThanU 1509 // result: (ROR x y) 1510 for { 1511 _ = v.Args[1] 1512 v_0 := v.Args[0] 1513 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 { 1514 break 1515 } 1516 _ = v_0.Args[1] 1517 x := v_0.Args[0] 1518 v_0_1 := v_0.Args[1] 1519 if v_0_1.Op != OpARM64ANDconst { 1520 break 1521 } 1522 t := v_0_1.Type 1523 if v_0_1.AuxInt != 63 { 1524 break 1525 } 1526 y := v_0_1.Args[0] 1527 v_1 := v.Args[1] 1528 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 { 1529 break 1530 } 1531 cc := v_1.Aux 1532 _ = v_1.Args[1] 1533 v_1_0 := v_1.Args[0] 1534 if v_1_0.Op != OpARM64SLL { 1535 break 1536 } 1537 _ = v_1_0.Args[1] 1538 if x != v_1_0.Args[0] { 1539 break 1540 } 1541 v_1_0_1 := v_1_0.Args[1] 1542 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 1543 break 1544 } 1545 _ = v_1_0_1.Args[1] 1546 v_1_0_1_0 := v_1_0_1.Args[0] 1547 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 { 1548 break 1549 } 1550 v_1_0_1_1 := v_1_0_1.Args[1] 1551 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] { 1552 break 1553 } 1554 v_1_1 := v_1.Args[1] 1555 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 1556 break 1557 } 1558 v_1_1_0 := v_1_1.Args[0] 1559 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 1560 break 1561 } 1562 _ = v_1_1_0.Args[1] 1563 v_1_1_0_0 := v_1_1_0.Args[0] 1564 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 { 1565 break 
1566 } 1567 v_1_1_0_1 := v_1_1_0.Args[1] 1568 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1569 break 1570 } 1571 v.reset(OpARM64ROR) 1572 v.AddArg(x) 1573 v.AddArg(y) 1574 return true 1575 } 1576 // match: (ADD (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 1577 // cond: cc.(Op) == OpARM64LessThanU 1578 // result: (ROR x y) 1579 for { 1580 _ = v.Args[1] 1581 v_0 := v.Args[0] 1582 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 { 1583 break 1584 } 1585 cc := v_0.Aux 1586 _ = v_0.Args[1] 1587 v_0_0 := v_0.Args[0] 1588 if v_0_0.Op != OpARM64SLL { 1589 break 1590 } 1591 _ = v_0_0.Args[1] 1592 x := v_0_0.Args[0] 1593 v_0_0_1 := v_0_0.Args[1] 1594 if v_0_0_1.Op != OpARM64SUB { 1595 break 1596 } 1597 t := v_0_0_1.Type 1598 _ = v_0_0_1.Args[1] 1599 v_0_0_1_0 := v_0_0_1.Args[0] 1600 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 { 1601 break 1602 } 1603 v_0_0_1_1 := v_0_0_1.Args[1] 1604 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 { 1605 break 1606 } 1607 y := v_0_0_1_1.Args[0] 1608 v_0_1 := v_0.Args[1] 1609 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 1610 break 1611 } 1612 v_0_1_0 := v_0_1.Args[0] 1613 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 1614 break 1615 } 1616 _ = v_0_1_0.Args[1] 1617 v_0_1_0_0 := v_0_1_0.Args[0] 1618 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 { 1619 break 1620 } 1621 v_0_1_0_1 := v_0_1_0.Args[1] 1622 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] { 1623 break 1624 } 1625 v_1 := v.Args[1] 1626 if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt64 { 1627 break 1628 } 1629 _ = v_1.Args[1] 1630 if x != v_1.Args[0] { 1631 break 1632 } 1633 v_1_1 := 
v_1.Args[1] 1634 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1635 break 1636 } 1637 v.reset(OpARM64ROR) 1638 v.AddArg(x) 1639 v.AddArg(y) 1640 return true 1641 } 1642 // match: (ADD (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 1643 // cond: cc.(Op) == OpARM64LessThanU 1644 // result: (RORW x (NEG <t> y)) 1645 for { 1646 _ = v.Args[1] 1647 v_0 := v.Args[0] 1648 if v_0.Op != OpARM64SLL { 1649 break 1650 } 1651 _ = v_0.Args[1] 1652 x := v_0.Args[0] 1653 v_0_1 := v_0.Args[1] 1654 if v_0_1.Op != OpARM64ANDconst { 1655 break 1656 } 1657 t := v_0_1.Type 1658 if v_0_1.AuxInt != 31 { 1659 break 1660 } 1661 y := v_0_1.Args[0] 1662 v_1 := v.Args[1] 1663 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 { 1664 break 1665 } 1666 cc := v_1.Aux 1667 _ = v_1.Args[1] 1668 v_1_0 := v_1.Args[0] 1669 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 { 1670 break 1671 } 1672 _ = v_1_0.Args[1] 1673 v_1_0_0 := v_1_0.Args[0] 1674 if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] { 1675 break 1676 } 1677 v_1_0_1 := v_1_0.Args[1] 1678 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 1679 break 1680 } 1681 _ = v_1_0_1.Args[1] 1682 v_1_0_1_0 := v_1_0_1.Args[0] 1683 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 { 1684 break 1685 } 1686 v_1_0_1_1 := v_1_0_1.Args[1] 1687 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] { 1688 break 1689 } 1690 v_1_1 := v_1.Args[1] 1691 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 1692 break 1693 } 1694 v_1_1_0 := v_1_1.Args[0] 1695 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 1696 break 1697 } 1698 _ = v_1_1_0.Args[1] 1699 v_1_1_0_0 := v_1_1_0.Args[0] 1700 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 
{ 1701 break 1702 } 1703 v_1_1_0_1 := v_1_1_0.Args[1] 1704 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1705 break 1706 } 1707 v.reset(OpARM64RORW) 1708 v.AddArg(x) 1709 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1710 v0.AddArg(y) 1711 v.AddArg(v0) 1712 return true 1713 } 1714 // match: (ADD (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 1715 // cond: cc.(Op) == OpARM64LessThanU 1716 // result: (RORW x (NEG <t> y)) 1717 for { 1718 _ = v.Args[1] 1719 v_0 := v.Args[0] 1720 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 { 1721 break 1722 } 1723 cc := v_0.Aux 1724 _ = v_0.Args[1] 1725 v_0_0 := v_0.Args[0] 1726 if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt32 { 1727 break 1728 } 1729 _ = v_0_0.Args[1] 1730 v_0_0_0 := v_0_0.Args[0] 1731 if v_0_0_0.Op != OpARM64MOVWUreg { 1732 break 1733 } 1734 x := v_0_0_0.Args[0] 1735 v_0_0_1 := v_0_0.Args[1] 1736 if v_0_0_1.Op != OpARM64SUB { 1737 break 1738 } 1739 t := v_0_0_1.Type 1740 _ = v_0_0_1.Args[1] 1741 v_0_0_1_0 := v_0_0_1.Args[0] 1742 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 { 1743 break 1744 } 1745 v_0_0_1_1 := v_0_0_1.Args[1] 1746 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 { 1747 break 1748 } 1749 y := v_0_0_1_1.Args[0] 1750 v_0_1 := v_0.Args[1] 1751 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 1752 break 1753 } 1754 v_0_1_0 := v_0_1.Args[0] 1755 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 1756 break 1757 } 1758 _ = v_0_1_0.Args[1] 1759 v_0_1_0_0 := v_0_1_0.Args[0] 1760 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 { 1761 break 1762 } 1763 v_0_1_0_1 := v_0_1_0.Args[1] 1764 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != 
v_0_1_0_1.Args[0] { 1765 break 1766 } 1767 v_1 := v.Args[1] 1768 if v_1.Op != OpARM64SLL { 1769 break 1770 } 1771 _ = v_1.Args[1] 1772 if x != v_1.Args[0] { 1773 break 1774 } 1775 v_1_1 := v_1.Args[1] 1776 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1777 break 1778 } 1779 v.reset(OpARM64RORW) 1780 v.AddArg(x) 1781 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1782 v0.AddArg(y) 1783 v.AddArg(v0) 1784 return true 1785 } 1786 // match: (ADD (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 1787 // cond: cc.(Op) == OpARM64LessThanU 1788 // result: (RORW x y) 1789 for { 1790 _ = v.Args[1] 1791 v_0 := v.Args[0] 1792 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 { 1793 break 1794 } 1795 _ = v_0.Args[1] 1796 v_0_0 := v_0.Args[0] 1797 if v_0_0.Op != OpARM64MOVWUreg { 1798 break 1799 } 1800 x := v_0_0.Args[0] 1801 v_0_1 := v_0.Args[1] 1802 if v_0_1.Op != OpARM64ANDconst { 1803 break 1804 } 1805 t := v_0_1.Type 1806 if v_0_1.AuxInt != 31 { 1807 break 1808 } 1809 y := v_0_1.Args[0] 1810 v_1 := v.Args[1] 1811 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 { 1812 break 1813 } 1814 cc := v_1.Aux 1815 _ = v_1.Args[1] 1816 v_1_0 := v_1.Args[0] 1817 if v_1_0.Op != OpARM64SLL { 1818 break 1819 } 1820 _ = v_1_0.Args[1] 1821 if x != v_1_0.Args[0] { 1822 break 1823 } 1824 v_1_0_1 := v_1_0.Args[1] 1825 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 1826 break 1827 } 1828 _ = v_1_0_1.Args[1] 1829 v_1_0_1_0 := v_1_0_1.Args[0] 1830 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 { 1831 break 1832 } 1833 v_1_0_1_1 := v_1_0_1.Args[1] 1834 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] { 1835 break 1836 } 1837 v_1_1 := v_1.Args[1] 1838 if v_1_1.Op != OpARM64CMPconst || 
v_1_1.AuxInt != 64 { 1839 break 1840 } 1841 v_1_1_0 := v_1_1.Args[0] 1842 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 1843 break 1844 } 1845 _ = v_1_1_0.Args[1] 1846 v_1_1_0_0 := v_1_1_0.Args[0] 1847 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 { 1848 break 1849 } 1850 v_1_1_0_1 := v_1_1_0.Args[1] 1851 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1852 break 1853 } 1854 v.reset(OpARM64RORW) 1855 v.AddArg(x) 1856 v.AddArg(y) 1857 return true 1858 } 1859 // match: (ADD (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 1860 // cond: cc.(Op) == OpARM64LessThanU 1861 // result: (RORW x y) 1862 for { 1863 _ = v.Args[1] 1864 v_0 := v.Args[0] 1865 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 { 1866 break 1867 } 1868 cc := v_0.Aux 1869 _ = v_0.Args[1] 1870 v_0_0 := v_0.Args[0] 1871 if v_0_0.Op != OpARM64SLL { 1872 break 1873 } 1874 _ = v_0_0.Args[1] 1875 x := v_0_0.Args[0] 1876 v_0_0_1 := v_0_0.Args[1] 1877 if v_0_0_1.Op != OpARM64SUB { 1878 break 1879 } 1880 t := v_0_0_1.Type 1881 _ = v_0_0_1.Args[1] 1882 v_0_0_1_0 := v_0_0_1.Args[0] 1883 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 { 1884 break 1885 } 1886 v_0_0_1_1 := v_0_0_1.Args[1] 1887 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 { 1888 break 1889 } 1890 y := v_0_0_1_1.Args[0] 1891 v_0_1 := v_0.Args[1] 1892 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 1893 break 1894 } 1895 v_0_1_0 := v_0_1.Args[0] 1896 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 1897 break 1898 } 1899 _ = v_0_1_0.Args[1] 1900 v_0_1_0_0 := v_0_1_0.Args[0] 1901 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 { 1902 break 1903 } 1904 v_0_1_0_1 := v_0_1_0.Args[1] 1905 if v_0_1_0_1.Op != 
OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != v_0_1_0_1.Args[0] { 1906 break 1907 } 1908 v_1 := v.Args[1] 1909 if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt32 { 1910 break 1911 } 1912 _ = v_1.Args[1] 1913 v_1_0 := v_1.Args[0] 1914 if v_1_0.Op != OpARM64MOVWUreg || x != v_1_0.Args[0] { 1915 break 1916 } 1917 v_1_1 := v_1.Args[1] 1918 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 1919 break 1920 } 1921 v.reset(OpARM64RORW) 1922 v.AddArg(x) 1923 v.AddArg(y) 1924 return true 1925 } 1926 return false 1927 } 1928 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 1929 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 1930 // result: (MOVDaddr [off1+off2] {sym} ptr) 1931 for { 1932 off1 := v.AuxInt 1933 v_0 := v.Args[0] 1934 if v_0.Op != OpARM64MOVDaddr { 1935 break 1936 } 1937 off2 := v_0.AuxInt 1938 sym := v_0.Aux 1939 ptr := v_0.Args[0] 1940 v.reset(OpARM64MOVDaddr) 1941 v.AuxInt = off1 + off2 1942 v.Aux = sym 1943 v.AddArg(ptr) 1944 return true 1945 } 1946 // match: (ADDconst [0] x) 1947 // result: x 1948 for { 1949 if v.AuxInt != 0 { 1950 break 1951 } 1952 x := v.Args[0] 1953 v.reset(OpCopy) 1954 v.Type = x.Type 1955 v.AddArg(x) 1956 return true 1957 } 1958 // match: (ADDconst [c] (MOVDconst [d])) 1959 // result: (MOVDconst [c+d]) 1960 for { 1961 c := v.AuxInt 1962 v_0 := v.Args[0] 1963 if v_0.Op != OpARM64MOVDconst { 1964 break 1965 } 1966 d := v_0.AuxInt 1967 v.reset(OpARM64MOVDconst) 1968 v.AuxInt = c + d 1969 return true 1970 } 1971 // match: (ADDconst [c] (ADDconst [d] x)) 1972 // result: (ADDconst [c+d] x) 1973 for { 1974 c := v.AuxInt 1975 v_0 := v.Args[0] 1976 if v_0.Op != OpARM64ADDconst { 1977 break 1978 } 1979 d := v_0.AuxInt 1980 x := v_0.Args[0] 1981 v.reset(OpARM64ADDconst) 1982 v.AuxInt = c + d 1983 v.AddArg(x) 1984 return true 1985 } 1986 // match: (ADDconst [c] (SUBconst [d] x)) 1987 // result: (ADDconst [c-d] x) 1988 for { 
1989 c := v.AuxInt 1990 v_0 := v.Args[0] 1991 if v_0.Op != OpARM64SUBconst { 1992 break 1993 } 1994 d := v_0.AuxInt 1995 x := v_0.Args[0] 1996 v.reset(OpARM64ADDconst) 1997 v.AuxInt = c - d 1998 v.AddArg(x) 1999 return true 2000 } 2001 return false 2002 } 2003 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 2004 b := v.Block 2005 typ := &b.Func.Config.Types 2006 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 2007 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 2008 for { 2009 d := v.AuxInt 2010 x := v.Args[1] 2011 v_0 := v.Args[0] 2012 if v_0.Op != OpARM64MOVDconst { 2013 break 2014 } 2015 c := v_0.AuxInt 2016 v.reset(OpARM64ADDconst) 2017 v.AuxInt = c 2018 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2019 v0.AuxInt = d 2020 v0.AddArg(x) 2021 v.AddArg(v0) 2022 return true 2023 } 2024 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 2025 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 2026 for { 2027 d := v.AuxInt 2028 _ = v.Args[1] 2029 x := v.Args[0] 2030 v_1 := v.Args[1] 2031 if v_1.Op != OpARM64MOVDconst { 2032 break 2033 } 2034 c := v_1.AuxInt 2035 v.reset(OpARM64ADDconst) 2036 v.AuxInt = int64(uint64(c) << uint64(d)) 2037 v.AddArg(x) 2038 return true 2039 } 2040 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 2041 // result: (RORconst [64-c] x) 2042 for { 2043 c := v.AuxInt 2044 x := v.Args[1] 2045 v_0 := v.Args[0] 2046 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 2047 break 2048 } 2049 v.reset(OpARM64RORconst) 2050 v.AuxInt = 64 - c 2051 v.AddArg(x) 2052 return true 2053 } 2054 // match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x) 2055 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 2056 // result: (RORWconst [32-c] x) 2057 for { 2058 t := v.Type 2059 c := v.AuxInt 2060 x := v.Args[1] 2061 v_0 := v.Args[0] 2062 if v_0.Op != OpARM64UBFX { 2063 break 2064 } 2065 bfc := v_0.AuxInt 2066 if x != v_0.Args[0] || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 2067 break 2068 } 2069 
v.reset(OpARM64RORWconst) 2070 v.AuxInt = 32 - c 2071 v.AddArg(x) 2072 return true 2073 } 2074 // match: (ADDshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x) 2075 // result: (REV16W x) 2076 for { 2077 if v.Type != typ.UInt16 || v.AuxInt != 8 { 2078 break 2079 } 2080 x := v.Args[1] 2081 v_0 := v.Args[0] 2082 if v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || v_0.AuxInt != armBFAuxInt(8, 8) || x != v_0.Args[0] { 2083 break 2084 } 2085 v.reset(OpARM64REV16W) 2086 v.AddArg(x) 2087 return true 2088 } 2089 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x2) 2090 // result: (EXTRconst [64-c] x2 x) 2091 for { 2092 c := v.AuxInt 2093 x2 := v.Args[1] 2094 v_0 := v.Args[0] 2095 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c { 2096 break 2097 } 2098 x := v_0.Args[0] 2099 v.reset(OpARM64EXTRconst) 2100 v.AuxInt = 64 - c 2101 v.AddArg(x2) 2102 v.AddArg(x) 2103 return true 2104 } 2105 // match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2) 2106 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 2107 // result: (EXTRWconst [32-c] x2 x) 2108 for { 2109 t := v.Type 2110 c := v.AuxInt 2111 x2 := v.Args[1] 2112 v_0 := v.Args[0] 2113 if v_0.Op != OpARM64UBFX { 2114 break 2115 } 2116 bfc := v_0.AuxInt 2117 x := v_0.Args[0] 2118 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 2119 break 2120 } 2121 v.reset(OpARM64EXTRWconst) 2122 v.AuxInt = 32 - c 2123 v.AddArg(x2) 2124 v.AddArg(x) 2125 return true 2126 } 2127 return false 2128 } 2129 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 2130 b := v.Block 2131 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 2132 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 2133 for { 2134 d := v.AuxInt 2135 x := v.Args[1] 2136 v_0 := v.Args[0] 2137 if v_0.Op != OpARM64MOVDconst { 2138 break 2139 } 2140 c := v_0.AuxInt 2141 v.reset(OpARM64ADDconst) 2142 v.AuxInt = c 2143 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2144 v0.AuxInt = d 2145 v0.AddArg(x) 2146 v.AddArg(v0) 2147 return true 2148 } 
2149 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 2150 // result: (ADDconst x [c>>uint64(d)]) 2151 for { 2152 d := v.AuxInt 2153 _ = v.Args[1] 2154 x := v.Args[0] 2155 v_1 := v.Args[1] 2156 if v_1.Op != OpARM64MOVDconst { 2157 break 2158 } 2159 c := v_1.AuxInt 2160 v.reset(OpARM64ADDconst) 2161 v.AuxInt = c >> uint64(d) 2162 v.AddArg(x) 2163 return true 2164 } 2165 return false 2166 } 2167 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 2168 b := v.Block 2169 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 2170 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 2171 for { 2172 d := v.AuxInt 2173 x := v.Args[1] 2174 v_0 := v.Args[0] 2175 if v_0.Op != OpARM64MOVDconst { 2176 break 2177 } 2178 c := v_0.AuxInt 2179 v.reset(OpARM64ADDconst) 2180 v.AuxInt = c 2181 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2182 v0.AuxInt = d 2183 v0.AddArg(x) 2184 v.AddArg(v0) 2185 return true 2186 } 2187 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 2188 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 2189 for { 2190 d := v.AuxInt 2191 _ = v.Args[1] 2192 x := v.Args[0] 2193 v_1 := v.Args[1] 2194 if v_1.Op != OpARM64MOVDconst { 2195 break 2196 } 2197 c := v_1.AuxInt 2198 v.reset(OpARM64ADDconst) 2199 v.AuxInt = int64(uint64(c) >> uint64(d)) 2200 v.AddArg(x) 2201 return true 2202 } 2203 // match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 2204 // result: (RORconst [ c] x) 2205 for { 2206 c := v.AuxInt 2207 x := v.Args[1] 2208 v_0 := v.Args[0] 2209 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 2210 break 2211 } 2212 v.reset(OpARM64RORconst) 2213 v.AuxInt = c 2214 v.AddArg(x) 2215 return true 2216 } 2217 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 2218 // cond: c < 32 && t.Size() == 4 2219 // result: (RORWconst [c] x) 2220 for { 2221 t := v.Type 2222 c := v.AuxInt 2223 _ = v.Args[1] 2224 v_0 := v.Args[0] 2225 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 32-c { 2226 break 2227 } 2228 x := v_0.Args[0] 2229 v_1 := v.Args[1] 2230 
if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) { 2231 break 2232 } 2233 v.reset(OpARM64RORWconst) 2234 v.AuxInt = c 2235 v.AddArg(x) 2236 return true 2237 } 2238 return false 2239 } 2240 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 2241 // match: (AND x (MOVDconst [c])) 2242 // result: (ANDconst [c] x) 2243 for { 2244 _ = v.Args[1] 2245 x := v.Args[0] 2246 v_1 := v.Args[1] 2247 if v_1.Op != OpARM64MOVDconst { 2248 break 2249 } 2250 c := v_1.AuxInt 2251 v.reset(OpARM64ANDconst) 2252 v.AuxInt = c 2253 v.AddArg(x) 2254 return true 2255 } 2256 // match: (AND (MOVDconst [c]) x) 2257 // result: (ANDconst [c] x) 2258 for { 2259 x := v.Args[1] 2260 v_0 := v.Args[0] 2261 if v_0.Op != OpARM64MOVDconst { 2262 break 2263 } 2264 c := v_0.AuxInt 2265 v.reset(OpARM64ANDconst) 2266 v.AuxInt = c 2267 v.AddArg(x) 2268 return true 2269 } 2270 // match: (AND x x) 2271 // result: x 2272 for { 2273 x := v.Args[1] 2274 if x != v.Args[0] { 2275 break 2276 } 2277 v.reset(OpCopy) 2278 v.Type = x.Type 2279 v.AddArg(x) 2280 return true 2281 } 2282 // match: (AND x (MVN y)) 2283 // result: (BIC x y) 2284 for { 2285 _ = v.Args[1] 2286 x := v.Args[0] 2287 v_1 := v.Args[1] 2288 if v_1.Op != OpARM64MVN { 2289 break 2290 } 2291 y := v_1.Args[0] 2292 v.reset(OpARM64BIC) 2293 v.AddArg(x) 2294 v.AddArg(y) 2295 return true 2296 } 2297 // match: (AND (MVN y) x) 2298 // result: (BIC x y) 2299 for { 2300 x := v.Args[1] 2301 v_0 := v.Args[0] 2302 if v_0.Op != OpARM64MVN { 2303 break 2304 } 2305 y := v_0.Args[0] 2306 v.reset(OpARM64BIC) 2307 v.AddArg(x) 2308 v.AddArg(y) 2309 return true 2310 } 2311 // match: (AND x0 x1:(SLLconst [c] y)) 2312 // cond: clobberIfDead(x1) 2313 // result: (ANDshiftLL x0 y [c]) 2314 for { 2315 _ = v.Args[1] 2316 x0 := v.Args[0] 2317 x1 := v.Args[1] 2318 if x1.Op != OpARM64SLLconst { 2319 break 2320 } 2321 c := x1.AuxInt 2322 y := x1.Args[0] 2323 if !(clobberIfDead(x1)) { 2324 break 2325 } 2326 v.reset(OpARM64ANDshiftLL) 2327 v.AuxInt = c 
2328 v.AddArg(x0) 2329 v.AddArg(y) 2330 return true 2331 } 2332 // match: (AND x1:(SLLconst [c] y) x0) 2333 // cond: clobberIfDead(x1) 2334 // result: (ANDshiftLL x0 y [c]) 2335 for { 2336 x0 := v.Args[1] 2337 x1 := v.Args[0] 2338 if x1.Op != OpARM64SLLconst { 2339 break 2340 } 2341 c := x1.AuxInt 2342 y := x1.Args[0] 2343 if !(clobberIfDead(x1)) { 2344 break 2345 } 2346 v.reset(OpARM64ANDshiftLL) 2347 v.AuxInt = c 2348 v.AddArg(x0) 2349 v.AddArg(y) 2350 return true 2351 } 2352 // match: (AND x0 x1:(SRLconst [c] y)) 2353 // cond: clobberIfDead(x1) 2354 // result: (ANDshiftRL x0 y [c]) 2355 for { 2356 _ = v.Args[1] 2357 x0 := v.Args[0] 2358 x1 := v.Args[1] 2359 if x1.Op != OpARM64SRLconst { 2360 break 2361 } 2362 c := x1.AuxInt 2363 y := x1.Args[0] 2364 if !(clobberIfDead(x1)) { 2365 break 2366 } 2367 v.reset(OpARM64ANDshiftRL) 2368 v.AuxInt = c 2369 v.AddArg(x0) 2370 v.AddArg(y) 2371 return true 2372 } 2373 // match: (AND x1:(SRLconst [c] y) x0) 2374 // cond: clobberIfDead(x1) 2375 // result: (ANDshiftRL x0 y [c]) 2376 for { 2377 x0 := v.Args[1] 2378 x1 := v.Args[0] 2379 if x1.Op != OpARM64SRLconst { 2380 break 2381 } 2382 c := x1.AuxInt 2383 y := x1.Args[0] 2384 if !(clobberIfDead(x1)) { 2385 break 2386 } 2387 v.reset(OpARM64ANDshiftRL) 2388 v.AuxInt = c 2389 v.AddArg(x0) 2390 v.AddArg(y) 2391 return true 2392 } 2393 // match: (AND x0 x1:(SRAconst [c] y)) 2394 // cond: clobberIfDead(x1) 2395 // result: (ANDshiftRA x0 y [c]) 2396 for { 2397 _ = v.Args[1] 2398 x0 := v.Args[0] 2399 x1 := v.Args[1] 2400 if x1.Op != OpARM64SRAconst { 2401 break 2402 } 2403 c := x1.AuxInt 2404 y := x1.Args[0] 2405 if !(clobberIfDead(x1)) { 2406 break 2407 } 2408 v.reset(OpARM64ANDshiftRA) 2409 v.AuxInt = c 2410 v.AddArg(x0) 2411 v.AddArg(y) 2412 return true 2413 } 2414 return false 2415 } 2416 func rewriteValueARM64_OpARM64AND_10(v *Value) bool { 2417 // match: (AND x1:(SRAconst [c] y) x0) 2418 // cond: clobberIfDead(x1) 2419 // result: (ANDshiftRA x0 y [c]) 2420 for { 2421 x0 := 
v.Args[1] 2422 x1 := v.Args[0] 2423 if x1.Op != OpARM64SRAconst { 2424 break 2425 } 2426 c := x1.AuxInt 2427 y := x1.Args[0] 2428 if !(clobberIfDead(x1)) { 2429 break 2430 } 2431 v.reset(OpARM64ANDshiftRA) 2432 v.AuxInt = c 2433 v.AddArg(x0) 2434 v.AddArg(y) 2435 return true 2436 } 2437 return false 2438 } 2439 func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool { 2440 // match: (ANDconst [0] _) 2441 // result: (MOVDconst [0]) 2442 for { 2443 if v.AuxInt != 0 { 2444 break 2445 } 2446 v.reset(OpARM64MOVDconst) 2447 v.AuxInt = 0 2448 return true 2449 } 2450 // match: (ANDconst [-1] x) 2451 // result: x 2452 for { 2453 if v.AuxInt != -1 { 2454 break 2455 } 2456 x := v.Args[0] 2457 v.reset(OpCopy) 2458 v.Type = x.Type 2459 v.AddArg(x) 2460 return true 2461 } 2462 // match: (ANDconst [c] (MOVDconst [d])) 2463 // result: (MOVDconst [c&d]) 2464 for { 2465 c := v.AuxInt 2466 v_0 := v.Args[0] 2467 if v_0.Op != OpARM64MOVDconst { 2468 break 2469 } 2470 d := v_0.AuxInt 2471 v.reset(OpARM64MOVDconst) 2472 v.AuxInt = c & d 2473 return true 2474 } 2475 // match: (ANDconst [c] (ANDconst [d] x)) 2476 // result: (ANDconst [c&d] x) 2477 for { 2478 c := v.AuxInt 2479 v_0 := v.Args[0] 2480 if v_0.Op != OpARM64ANDconst { 2481 break 2482 } 2483 d := v_0.AuxInt 2484 x := v_0.Args[0] 2485 v.reset(OpARM64ANDconst) 2486 v.AuxInt = c & d 2487 v.AddArg(x) 2488 return true 2489 } 2490 // match: (ANDconst [c] (MOVWUreg x)) 2491 // result: (ANDconst [c&(1<<32-1)] x) 2492 for { 2493 c := v.AuxInt 2494 v_0 := v.Args[0] 2495 if v_0.Op != OpARM64MOVWUreg { 2496 break 2497 } 2498 x := v_0.Args[0] 2499 v.reset(OpARM64ANDconst) 2500 v.AuxInt = c & (1<<32 - 1) 2501 v.AddArg(x) 2502 return true 2503 } 2504 // match: (ANDconst [c] (MOVHUreg x)) 2505 // result: (ANDconst [c&(1<<16-1)] x) 2506 for { 2507 c := v.AuxInt 2508 v_0 := v.Args[0] 2509 if v_0.Op != OpARM64MOVHUreg { 2510 break 2511 } 2512 x := v_0.Args[0] 2513 v.reset(OpARM64ANDconst) 2514 v.AuxInt = c & (1<<16 - 1) 2515 v.AddArg(x) 2516 return 
true 2517 } 2518 // match: (ANDconst [c] (MOVBUreg x)) 2519 // result: (ANDconst [c&(1<<8-1)] x) 2520 for { 2521 c := v.AuxInt 2522 v_0 := v.Args[0] 2523 if v_0.Op != OpARM64MOVBUreg { 2524 break 2525 } 2526 x := v_0.Args[0] 2527 v.reset(OpARM64ANDconst) 2528 v.AuxInt = c & (1<<8 - 1) 2529 v.AddArg(x) 2530 return true 2531 } 2532 // match: (ANDconst [ac] (SLLconst [sc] x)) 2533 // cond: isARM64BFMask(sc, ac, sc) 2534 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x) 2535 for { 2536 ac := v.AuxInt 2537 v_0 := v.Args[0] 2538 if v_0.Op != OpARM64SLLconst { 2539 break 2540 } 2541 sc := v_0.AuxInt 2542 x := v_0.Args[0] 2543 if !(isARM64BFMask(sc, ac, sc)) { 2544 break 2545 } 2546 v.reset(OpARM64UBFIZ) 2547 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, sc)) 2548 v.AddArg(x) 2549 return true 2550 } 2551 // match: (ANDconst [ac] (SRLconst [sc] x)) 2552 // cond: isARM64BFMask(sc, ac, 0) 2553 // result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x) 2554 for { 2555 ac := v.AuxInt 2556 v_0 := v.Args[0] 2557 if v_0.Op != OpARM64SRLconst { 2558 break 2559 } 2560 sc := v_0.AuxInt 2561 x := v_0.Args[0] 2562 if !(isARM64BFMask(sc, ac, 0)) { 2563 break 2564 } 2565 v.reset(OpARM64UBFX) 2566 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, 0)) 2567 v.AddArg(x) 2568 return true 2569 } 2570 return false 2571 } 2572 func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool { 2573 b := v.Block 2574 // match: (ANDshiftLL (MOVDconst [c]) x [d]) 2575 // result: (ANDconst [c] (SLLconst <x.Type> x [d])) 2576 for { 2577 d := v.AuxInt 2578 x := v.Args[1] 2579 v_0 := v.Args[0] 2580 if v_0.Op != OpARM64MOVDconst { 2581 break 2582 } 2583 c := v_0.AuxInt 2584 v.reset(OpARM64ANDconst) 2585 v.AuxInt = c 2586 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2587 v0.AuxInt = d 2588 v0.AddArg(x) 2589 v.AddArg(v0) 2590 return true 2591 } 2592 // match: (ANDshiftLL x (MOVDconst [c]) [d]) 2593 // result: (ANDconst x [int64(uint64(c)<<uint64(d))]) 2594 for { 2595 d := v.AuxInt 2596 _ = v.Args[1] 2597 
x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// The shift of the constant operand is performed at compile time.
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	// NOTE(review): v is x & ((x<<c)<<d); collapsing it to y = x<<c is only an
	// identity when the nested shift cannot actually occur (SLLconst-of-SLLconst
	// folds elsewhere). Upstream later rewrote this family of self-collapse
	// rules — verify against gen/ARM64.rules before relying on it.
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] || !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ANDshiftRA_0 applies the gen/ARM64.rules rewrites
// for ANDshiftRA (AND with an arithmetically right-shifted second operand):
// a constant left operand becomes ANDconst of an explicit SRAconst; a constant
// right operand is shifted at compile time and folded into ANDconst; and the
// shift-matching self-AND is collapsed.
// NOTE(review): generated code — change gen/ARM64.rules, not this file.
func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool {
	b := v.Block
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// Signed shift of the int64 AuxInt mirrors SRA (arithmetic) semantics.
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	// NOTE(review): same latent-pattern caveat as the ANDshiftLL rule above.
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] || !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}

func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value)
bool { 2689 b := v.Block 2690 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 2691 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 2692 for { 2693 d := v.AuxInt 2694 x := v.Args[1] 2695 v_0 := v.Args[0] 2696 if v_0.Op != OpARM64MOVDconst { 2697 break 2698 } 2699 c := v_0.AuxInt 2700 v.reset(OpARM64ANDconst) 2701 v.AuxInt = c 2702 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2703 v0.AuxInt = d 2704 v0.AddArg(x) 2705 v.AddArg(v0) 2706 return true 2707 } 2708 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 2709 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 2710 for { 2711 d := v.AuxInt 2712 _ = v.Args[1] 2713 x := v.Args[0] 2714 v_1 := v.Args[1] 2715 if v_1.Op != OpARM64MOVDconst { 2716 break 2717 } 2718 c := v_1.AuxInt 2719 v.reset(OpARM64ANDconst) 2720 v.AuxInt = int64(uint64(c) >> uint64(d)) 2721 v.AddArg(x) 2722 return true 2723 } 2724 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 2725 // cond: c==d 2726 // result: y 2727 for { 2728 d := v.AuxInt 2729 _ = v.Args[1] 2730 x := v.Args[0] 2731 y := v.Args[1] 2732 if y.Op != OpARM64SRLconst { 2733 break 2734 } 2735 c := y.AuxInt 2736 if x != y.Args[0] || !(c == d) { 2737 break 2738 } 2739 v.reset(OpCopy) 2740 v.Type = y.Type 2741 v.AddArg(y) 2742 return true 2743 } 2744 return false 2745 } 2746 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 2747 // match: (BIC x (MOVDconst [c])) 2748 // result: (ANDconst [^c] x) 2749 for { 2750 _ = v.Args[1] 2751 x := v.Args[0] 2752 v_1 := v.Args[1] 2753 if v_1.Op != OpARM64MOVDconst { 2754 break 2755 } 2756 c := v_1.AuxInt 2757 v.reset(OpARM64ANDconst) 2758 v.AuxInt = ^c 2759 v.AddArg(x) 2760 return true 2761 } 2762 // match: (BIC x x) 2763 // result: (MOVDconst [0]) 2764 for { 2765 x := v.Args[1] 2766 if x != v.Args[0] { 2767 break 2768 } 2769 v.reset(OpARM64MOVDconst) 2770 v.AuxInt = 0 2771 return true 2772 } 2773 // match: (BIC x0 x1:(SLLconst [c] y)) 2774 // cond: clobberIfDead(x1) 2775 // result: (BICshiftLL x0 y [c]) 2776 for { 2777 _ = v.Args[1] 2778 x0 := 
v.Args[0] 2779 x1 := v.Args[1] 2780 if x1.Op != OpARM64SLLconst { 2781 break 2782 } 2783 c := x1.AuxInt 2784 y := x1.Args[0] 2785 if !(clobberIfDead(x1)) { 2786 break 2787 } 2788 v.reset(OpARM64BICshiftLL) 2789 v.AuxInt = c 2790 v.AddArg(x0) 2791 v.AddArg(y) 2792 return true 2793 } 2794 // match: (BIC x0 x1:(SRLconst [c] y)) 2795 // cond: clobberIfDead(x1) 2796 // result: (BICshiftRL x0 y [c]) 2797 for { 2798 _ = v.Args[1] 2799 x0 := v.Args[0] 2800 x1 := v.Args[1] 2801 if x1.Op != OpARM64SRLconst { 2802 break 2803 } 2804 c := x1.AuxInt 2805 y := x1.Args[0] 2806 if !(clobberIfDead(x1)) { 2807 break 2808 } 2809 v.reset(OpARM64BICshiftRL) 2810 v.AuxInt = c 2811 v.AddArg(x0) 2812 v.AddArg(y) 2813 return true 2814 } 2815 // match: (BIC x0 x1:(SRAconst [c] y)) 2816 // cond: clobberIfDead(x1) 2817 // result: (BICshiftRA x0 y [c]) 2818 for { 2819 _ = v.Args[1] 2820 x0 := v.Args[0] 2821 x1 := v.Args[1] 2822 if x1.Op != OpARM64SRAconst { 2823 break 2824 } 2825 c := x1.AuxInt 2826 y := x1.Args[0] 2827 if !(clobberIfDead(x1)) { 2828 break 2829 } 2830 v.reset(OpARM64BICshiftRA) 2831 v.AuxInt = c 2832 v.AddArg(x0) 2833 v.AddArg(y) 2834 return true 2835 } 2836 return false 2837 } 2838 func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool { 2839 // match: (BICshiftLL x (MOVDconst [c]) [d]) 2840 // result: (ANDconst x [^int64(uint64(c)<<uint64(d))]) 2841 for { 2842 d := v.AuxInt 2843 _ = v.Args[1] 2844 x := v.Args[0] 2845 v_1 := v.Args[1] 2846 if v_1.Op != OpARM64MOVDconst { 2847 break 2848 } 2849 c := v_1.AuxInt 2850 v.reset(OpARM64ANDconst) 2851 v.AuxInt = ^int64(uint64(c) << uint64(d)) 2852 v.AddArg(x) 2853 return true 2854 } 2855 // match: (BICshiftLL x (SLLconst x [c]) [d]) 2856 // cond: c==d 2857 // result: (MOVDconst [0]) 2858 for { 2859 d := v.AuxInt 2860 _ = v.Args[1] 2861 x := v.Args[0] 2862 v_1 := v.Args[1] 2863 if v_1.Op != OpARM64SLLconst { 2864 break 2865 } 2866 c := v_1.AuxInt 2867 if x != v_1.Args[0] || !(c == d) { 2868 break 2869 } 2870 
v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64BICshiftRA_0 applies the gen/ARM64.rules rewrites
// for BICshiftRA (bit-clear with an arithmetically right-shifted second
// operand): a constant second operand is shifted and complemented at compile
// time into ANDconst; a matching self-clear collapses to zero.
// NOTE(review): generated code — change gen/ARM64.rules, not this file.
func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool {
	// match: (BICshiftRA x (MOVDconst [c]) [d])
	// result: (ANDconst x [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// BIC x k == AND x ^k, with the arithmetic shift done at compile time.
		v.AuxInt = ^(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	// NOTE(review): v is x &^ ((x>>c)>>d); the zero result relies on the nested
	// shift never occurring in practice (SRAconst-of-SRAconst folds elsewhere).
	// Upstream later restructured these self-cancel rules — verify.
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64BICshiftRL_0 is the logical-right-shift analogue of
// BICshiftRA above: constant fold via unsigned shift, and self-clear to zero.
func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool {
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// Unsigned shift of the constant mirrors SRL (logical) semantics.
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	// NOTE(review): same latent-pattern caveat as the BICshiftRA rule above.
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64CMN_0 applies the gen/ARM64.rules rewrites for CMN
// (compare negative): fold a constant operand (either side — CMN is
// commutative per these rules) into CMNconst, and absorb a shift-by-constant
// operand (either side) into the CMNshiftLL/RL/RA forms when the shift value
// is otherwise dead (clobberIfDead).
func rewriteValueARM64_OpARM64CMN_0(v *Value) bool {
	// match: (CMN x (MOVDconst [c]))
	// result: (CMNconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMN (MOVDconst [c]) x)
	// result: (CMNconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64CMNW_0 folds a constant operand (either side) of a
// 32-bit compare-negative into CMNWconst.
func rewriteValueARM64_OpARM64CMNW_0(v *Value) bool {
	// match: (CMNW x (MOVDconst [c]))
	// result: (CMNWconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMNW (MOVDconst [c]) x)
	// result: (CMNWconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64CMNWconst_0 constant-evaluates CMNWconst of a
// constant into one of the Flag* pseudo-values, comparing int32(x) against
// int32(-y) (signed) and uint32(x) against uint32(-y) (unsigned) per the
// rule conditions that follow.
func rewriteValueARM64_OpARM64CMNWconst_0(v *Value) bool {
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(-y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
3146 if v_0.Op != OpARM64MOVDconst { 3147 break 3148 } 3149 x := v_0.AuxInt 3150 if !(int32(x) == int32(-y)) { 3151 break 3152 } 3153 v.reset(OpARM64FlagEQ) 3154 return true 3155 } 3156 // match: (CMNWconst (MOVDconst [x]) [y]) 3157 // cond: int32(x)<int32(-y) && uint32(x)<uint32(-y) 3158 // result: (FlagLT_ULT) 3159 for { 3160 y := v.AuxInt 3161 v_0 := v.Args[0] 3162 if v_0.Op != OpARM64MOVDconst { 3163 break 3164 } 3165 x := v_0.AuxInt 3166 if !(int32(x) < int32(-y) && uint32(x) < uint32(-y)) { 3167 break 3168 } 3169 v.reset(OpARM64FlagLT_ULT) 3170 return true 3171 } 3172 // match: (CMNWconst (MOVDconst [x]) [y]) 3173 // cond: int32(x)<int32(-y) && uint32(x)>uint32(-y) 3174 // result: (FlagLT_UGT) 3175 for { 3176 y := v.AuxInt 3177 v_0 := v.Args[0] 3178 if v_0.Op != OpARM64MOVDconst { 3179 break 3180 } 3181 x := v_0.AuxInt 3182 if !(int32(x) < int32(-y) && uint32(x) > uint32(-y)) { 3183 break 3184 } 3185 v.reset(OpARM64FlagLT_UGT) 3186 return true 3187 } 3188 // match: (CMNWconst (MOVDconst [x]) [y]) 3189 // cond: int32(x)>int32(-y) && uint32(x)<uint32(-y) 3190 // result: (FlagGT_ULT) 3191 for { 3192 y := v.AuxInt 3193 v_0 := v.Args[0] 3194 if v_0.Op != OpARM64MOVDconst { 3195 break 3196 } 3197 x := v_0.AuxInt 3198 if !(int32(x) > int32(-y) && uint32(x) < uint32(-y)) { 3199 break 3200 } 3201 v.reset(OpARM64FlagGT_ULT) 3202 return true 3203 } 3204 // match: (CMNWconst (MOVDconst [x]) [y]) 3205 // cond: int32(x)>int32(-y) && uint32(x)>uint32(-y) 3206 // result: (FlagGT_UGT) 3207 for { 3208 y := v.AuxInt 3209 v_0 := v.Args[0] 3210 if v_0.Op != OpARM64MOVDconst { 3211 break 3212 } 3213 x := v_0.AuxInt 3214 if !(int32(x) > int32(-y) && uint32(x) > uint32(-y)) { 3215 break 3216 } 3217 v.reset(OpARM64FlagGT_UGT) 3218 return true 3219 } 3220 return false 3221 } 3222 func rewriteValueARM64_OpARM64CMNconst_0(v *Value) bool { 3223 // match: (CMNconst (MOVDconst [x]) [y]) 3224 // cond: int64(x)==int64(-y) 3225 // result: (FlagEQ) 3226 for { 3227 y := v.AuxInt 3228 v_0 := 
v.Args[0] 3229 if v_0.Op != OpARM64MOVDconst { 3230 break 3231 } 3232 x := v_0.AuxInt 3233 if !(int64(x) == int64(-y)) { 3234 break 3235 } 3236 v.reset(OpARM64FlagEQ) 3237 return true 3238 } 3239 // match: (CMNconst (MOVDconst [x]) [y]) 3240 // cond: int64(x)<int64(-y) && uint64(x)<uint64(-y) 3241 // result: (FlagLT_ULT) 3242 for { 3243 y := v.AuxInt 3244 v_0 := v.Args[0] 3245 if v_0.Op != OpARM64MOVDconst { 3246 break 3247 } 3248 x := v_0.AuxInt 3249 if !(int64(x) < int64(-y) && uint64(x) < uint64(-y)) { 3250 break 3251 } 3252 v.reset(OpARM64FlagLT_ULT) 3253 return true 3254 } 3255 // match: (CMNconst (MOVDconst [x]) [y]) 3256 // cond: int64(x)<int64(-y) && uint64(x)>uint64(-y) 3257 // result: (FlagLT_UGT) 3258 for { 3259 y := v.AuxInt 3260 v_0 := v.Args[0] 3261 if v_0.Op != OpARM64MOVDconst { 3262 break 3263 } 3264 x := v_0.AuxInt 3265 if !(int64(x) < int64(-y) && uint64(x) > uint64(-y)) { 3266 break 3267 } 3268 v.reset(OpARM64FlagLT_UGT) 3269 return true 3270 } 3271 // match: (CMNconst (MOVDconst [x]) [y]) 3272 // cond: int64(x)>int64(-y) && uint64(x)<uint64(-y) 3273 // result: (FlagGT_ULT) 3274 for { 3275 y := v.AuxInt 3276 v_0 := v.Args[0] 3277 if v_0.Op != OpARM64MOVDconst { 3278 break 3279 } 3280 x := v_0.AuxInt 3281 if !(int64(x) > int64(-y) && uint64(x) < uint64(-y)) { 3282 break 3283 } 3284 v.reset(OpARM64FlagGT_ULT) 3285 return true 3286 } 3287 // match: (CMNconst (MOVDconst [x]) [y]) 3288 // cond: int64(x)>int64(-y) && uint64(x)>uint64(-y) 3289 // result: (FlagGT_UGT) 3290 for { 3291 y := v.AuxInt 3292 v_0 := v.Args[0] 3293 if v_0.Op != OpARM64MOVDconst { 3294 break 3295 } 3296 x := v_0.AuxInt 3297 if !(int64(x) > int64(-y) && uint64(x) > uint64(-y)) { 3298 break 3299 } 3300 v.reset(OpARM64FlagGT_UGT) 3301 return true 3302 } 3303 return false 3304 } 3305 func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool { 3306 b := v.Block 3307 // match: (CMNshiftLL (MOVDconst [c]) x [d]) 3308 // result: (CMNconst [c] (SLLconst <x.Type> x [d])) 3309 for { 3310 d 
:= v.AuxInt 3311 x := v.Args[1] 3312 v_0 := v.Args[0] 3313 if v_0.Op != OpARM64MOVDconst { 3314 break 3315 } 3316 c := v_0.AuxInt 3317 v.reset(OpARM64CMNconst) 3318 v.AuxInt = c 3319 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 3320 v0.AuxInt = d 3321 v0.AddArg(x) 3322 v.AddArg(v0) 3323 return true 3324 } 3325 // match: (CMNshiftLL x (MOVDconst [c]) [d]) 3326 // result: (CMNconst x [int64(uint64(c)<<uint64(d))]) 3327 for { 3328 d := v.AuxInt 3329 _ = v.Args[1] 3330 x := v.Args[0] 3331 v_1 := v.Args[1] 3332 if v_1.Op != OpARM64MOVDconst { 3333 break 3334 } 3335 c := v_1.AuxInt 3336 v.reset(OpARM64CMNconst) 3337 v.AuxInt = int64(uint64(c) << uint64(d)) 3338 v.AddArg(x) 3339 return true 3340 } 3341 return false 3342 } 3343 func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool { 3344 b := v.Block 3345 // match: (CMNshiftRA (MOVDconst [c]) x [d]) 3346 // result: (CMNconst [c] (SRAconst <x.Type> x [d])) 3347 for { 3348 d := v.AuxInt 3349 x := v.Args[1] 3350 v_0 := v.Args[0] 3351 if v_0.Op != OpARM64MOVDconst { 3352 break 3353 } 3354 c := v_0.AuxInt 3355 v.reset(OpARM64CMNconst) 3356 v.AuxInt = c 3357 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 3358 v0.AuxInt = d 3359 v0.AddArg(x) 3360 v.AddArg(v0) 3361 return true 3362 } 3363 // match: (CMNshiftRA x (MOVDconst [c]) [d]) 3364 // result: (CMNconst x [c>>uint64(d)]) 3365 for { 3366 d := v.AuxInt 3367 _ = v.Args[1] 3368 x := v.Args[0] 3369 v_1 := v.Args[1] 3370 if v_1.Op != OpARM64MOVDconst { 3371 break 3372 } 3373 c := v_1.AuxInt 3374 v.reset(OpARM64CMNconst) 3375 v.AuxInt = c >> uint64(d) 3376 v.AddArg(x) 3377 return true 3378 } 3379 return false 3380 } 3381 func rewriteValueARM64_OpARM64CMNshiftRL_0(v *Value) bool { 3382 b := v.Block 3383 // match: (CMNshiftRL (MOVDconst [c]) x [d]) 3384 // result: (CMNconst [c] (SRLconst <x.Type> x [d])) 3385 for { 3386 d := v.AuxInt 3387 x := v.Args[1] 3388 v_0 := v.Args[0] 3389 if v_0.Op != OpARM64MOVDconst { 3390 break 3391 } 3392 c := v_0.AuxInt 3393 
v.reset(OpARM64CMNconst) 3394 v.AuxInt = c 3395 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3396 v0.AuxInt = d 3397 v0.AddArg(x) 3398 v.AddArg(v0) 3399 return true 3400 } 3401 // match: (CMNshiftRL x (MOVDconst [c]) [d]) 3402 // result: (CMNconst x [int64(uint64(c)>>uint64(d))]) 3403 for { 3404 d := v.AuxInt 3405 _ = v.Args[1] 3406 x := v.Args[0] 3407 v_1 := v.Args[1] 3408 if v_1.Op != OpARM64MOVDconst { 3409 break 3410 } 3411 c := v_1.AuxInt 3412 v.reset(OpARM64CMNconst) 3413 v.AuxInt = int64(uint64(c) >> uint64(d)) 3414 v.AddArg(x) 3415 return true 3416 } 3417 return false 3418 } 3419 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 3420 b := v.Block 3421 // match: (CMP x (MOVDconst [c])) 3422 // result: (CMPconst [c] x) 3423 for { 3424 _ = v.Args[1] 3425 x := v.Args[0] 3426 v_1 := v.Args[1] 3427 if v_1.Op != OpARM64MOVDconst { 3428 break 3429 } 3430 c := v_1.AuxInt 3431 v.reset(OpARM64CMPconst) 3432 v.AuxInt = c 3433 v.AddArg(x) 3434 return true 3435 } 3436 // match: (CMP (MOVDconst [c]) x) 3437 // result: (InvertFlags (CMPconst [c] x)) 3438 for { 3439 x := v.Args[1] 3440 v_0 := v.Args[0] 3441 if v_0.Op != OpARM64MOVDconst { 3442 break 3443 } 3444 c := v_0.AuxInt 3445 v.reset(OpARM64InvertFlags) 3446 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3447 v0.AuxInt = c 3448 v0.AddArg(x) 3449 v.AddArg(v0) 3450 return true 3451 } 3452 // match: (CMP x0 x1:(SLLconst [c] y)) 3453 // cond: clobberIfDead(x1) 3454 // result: (CMPshiftLL x0 y [c]) 3455 for { 3456 _ = v.Args[1] 3457 x0 := v.Args[0] 3458 x1 := v.Args[1] 3459 if x1.Op != OpARM64SLLconst { 3460 break 3461 } 3462 c := x1.AuxInt 3463 y := x1.Args[0] 3464 if !(clobberIfDead(x1)) { 3465 break 3466 } 3467 v.reset(OpARM64CMPshiftLL) 3468 v.AuxInt = c 3469 v.AddArg(x0) 3470 v.AddArg(y) 3471 return true 3472 } 3473 // match: (CMP x0:(SLLconst [c] y) x1) 3474 // cond: clobberIfDead(x0) 3475 // result: (InvertFlags (CMPshiftLL x1 y [c])) 3476 for { 3477 x1 := v.Args[1] 3478 x0 := v.Args[0] 3479 if 
x0.Op != OpARM64SLLconst { 3480 break 3481 } 3482 c := x0.AuxInt 3483 y := x0.Args[0] 3484 if !(clobberIfDead(x0)) { 3485 break 3486 } 3487 v.reset(OpARM64InvertFlags) 3488 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 3489 v0.AuxInt = c 3490 v0.AddArg(x1) 3491 v0.AddArg(y) 3492 v.AddArg(v0) 3493 return true 3494 } 3495 // match: (CMP x0 x1:(SRLconst [c] y)) 3496 // cond: clobberIfDead(x1) 3497 // result: (CMPshiftRL x0 y [c]) 3498 for { 3499 _ = v.Args[1] 3500 x0 := v.Args[0] 3501 x1 := v.Args[1] 3502 if x1.Op != OpARM64SRLconst { 3503 break 3504 } 3505 c := x1.AuxInt 3506 y := x1.Args[0] 3507 if !(clobberIfDead(x1)) { 3508 break 3509 } 3510 v.reset(OpARM64CMPshiftRL) 3511 v.AuxInt = c 3512 v.AddArg(x0) 3513 v.AddArg(y) 3514 return true 3515 } 3516 // match: (CMP x0:(SRLconst [c] y) x1) 3517 // cond: clobberIfDead(x0) 3518 // result: (InvertFlags (CMPshiftRL x1 y [c])) 3519 for { 3520 x1 := v.Args[1] 3521 x0 := v.Args[0] 3522 if x0.Op != OpARM64SRLconst { 3523 break 3524 } 3525 c := x0.AuxInt 3526 y := x0.Args[0] 3527 if !(clobberIfDead(x0)) { 3528 break 3529 } 3530 v.reset(OpARM64InvertFlags) 3531 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 3532 v0.AuxInt = c 3533 v0.AddArg(x1) 3534 v0.AddArg(y) 3535 v.AddArg(v0) 3536 return true 3537 } 3538 // match: (CMP x0 x1:(SRAconst [c] y)) 3539 // cond: clobberIfDead(x1) 3540 // result: (CMPshiftRA x0 y [c]) 3541 for { 3542 _ = v.Args[1] 3543 x0 := v.Args[0] 3544 x1 := v.Args[1] 3545 if x1.Op != OpARM64SRAconst { 3546 break 3547 } 3548 c := x1.AuxInt 3549 y := x1.Args[0] 3550 if !(clobberIfDead(x1)) { 3551 break 3552 } 3553 v.reset(OpARM64CMPshiftRA) 3554 v.AuxInt = c 3555 v.AddArg(x0) 3556 v.AddArg(y) 3557 return true 3558 } 3559 // match: (CMP x0:(SRAconst [c] y) x1) 3560 // cond: clobberIfDead(x0) 3561 // result: (InvertFlags (CMPshiftRA x1 y [c])) 3562 for { 3563 x1 := v.Args[1] 3564 x0 := v.Args[0] 3565 if x0.Op != OpARM64SRAconst { 3566 break 3567 } 3568 c := x0.AuxInt 3569 y := 
x0.Args[0] 3570 if !(clobberIfDead(x0)) { 3571 break 3572 } 3573 v.reset(OpARM64InvertFlags) 3574 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags) 3575 v0.AuxInt = c 3576 v0.AddArg(x1) 3577 v0.AddArg(y) 3578 v.AddArg(v0) 3579 return true 3580 } 3581 return false 3582 } 3583 func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool { 3584 b := v.Block 3585 // match: (CMPW x (MOVDconst [c])) 3586 // result: (CMPWconst [int64(int32(c))] x) 3587 for { 3588 _ = v.Args[1] 3589 x := v.Args[0] 3590 v_1 := v.Args[1] 3591 if v_1.Op != OpARM64MOVDconst { 3592 break 3593 } 3594 c := v_1.AuxInt 3595 v.reset(OpARM64CMPWconst) 3596 v.AuxInt = int64(int32(c)) 3597 v.AddArg(x) 3598 return true 3599 } 3600 // match: (CMPW (MOVDconst [c]) x) 3601 // result: (InvertFlags (CMPWconst [int64(int32(c))] x)) 3602 for { 3603 x := v.Args[1] 3604 v_0 := v.Args[0] 3605 if v_0.Op != OpARM64MOVDconst { 3606 break 3607 } 3608 c := v_0.AuxInt 3609 v.reset(OpARM64InvertFlags) 3610 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 3611 v0.AuxInt = int64(int32(c)) 3612 v0.AddArg(x) 3613 v.AddArg(v0) 3614 return true 3615 } 3616 return false 3617 } 3618 func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool { 3619 // match: (CMPWconst (MOVDconst [x]) [y]) 3620 // cond: int32(x)==int32(y) 3621 // result: (FlagEQ) 3622 for { 3623 y := v.AuxInt 3624 v_0 := v.Args[0] 3625 if v_0.Op != OpARM64MOVDconst { 3626 break 3627 } 3628 x := v_0.AuxInt 3629 if !(int32(x) == int32(y)) { 3630 break 3631 } 3632 v.reset(OpARM64FlagEQ) 3633 return true 3634 } 3635 // match: (CMPWconst (MOVDconst [x]) [y]) 3636 // cond: int32(x)<int32(y) && uint32(x)<uint32(y) 3637 // result: (FlagLT_ULT) 3638 for { 3639 y := v.AuxInt 3640 v_0 := v.Args[0] 3641 if v_0.Op != OpARM64MOVDconst { 3642 break 3643 } 3644 x := v_0.AuxInt 3645 if !(int32(x) < int32(y) && uint32(x) < uint32(y)) { 3646 break 3647 } 3648 v.reset(OpARM64FlagLT_ULT) 3649 return true 3650 } 3651 // match: (CMPWconst (MOVDconst [x]) [y]) 3652 // cond: 
int32(x)<int32(y) && uint32(x)>uint32(y) 3653 // result: (FlagLT_UGT) 3654 for { 3655 y := v.AuxInt 3656 v_0 := v.Args[0] 3657 if v_0.Op != OpARM64MOVDconst { 3658 break 3659 } 3660 x := v_0.AuxInt 3661 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 3662 break 3663 } 3664 v.reset(OpARM64FlagLT_UGT) 3665 return true 3666 } 3667 // match: (CMPWconst (MOVDconst [x]) [y]) 3668 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 3669 // result: (FlagGT_ULT) 3670 for { 3671 y := v.AuxInt 3672 v_0 := v.Args[0] 3673 if v_0.Op != OpARM64MOVDconst { 3674 break 3675 } 3676 x := v_0.AuxInt 3677 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { 3678 break 3679 } 3680 v.reset(OpARM64FlagGT_ULT) 3681 return true 3682 } 3683 // match: (CMPWconst (MOVDconst [x]) [y]) 3684 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 3685 // result: (FlagGT_UGT) 3686 for { 3687 y := v.AuxInt 3688 v_0 := v.Args[0] 3689 if v_0.Op != OpARM64MOVDconst { 3690 break 3691 } 3692 x := v_0.AuxInt 3693 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { 3694 break 3695 } 3696 v.reset(OpARM64FlagGT_UGT) 3697 return true 3698 } 3699 // match: (CMPWconst (MOVBUreg _) [c]) 3700 // cond: 0xff < int32(c) 3701 // result: (FlagLT_ULT) 3702 for { 3703 c := v.AuxInt 3704 v_0 := v.Args[0] 3705 if v_0.Op != OpARM64MOVBUreg || !(0xff < int32(c)) { 3706 break 3707 } 3708 v.reset(OpARM64FlagLT_ULT) 3709 return true 3710 } 3711 // match: (CMPWconst (MOVHUreg _) [c]) 3712 // cond: 0xffff < int32(c) 3713 // result: (FlagLT_ULT) 3714 for { 3715 c := v.AuxInt 3716 v_0 := v.Args[0] 3717 if v_0.Op != OpARM64MOVHUreg || !(0xffff < int32(c)) { 3718 break 3719 } 3720 v.reset(OpARM64FlagLT_ULT) 3721 return true 3722 } 3723 return false 3724 } 3725 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool { 3726 // match: (CMPconst (MOVDconst [x]) [y]) 3727 // cond: x==y 3728 // result: (FlagEQ) 3729 for { 3730 y := v.AuxInt 3731 v_0 := v.Args[0] 3732 if v_0.Op != OpARM64MOVDconst { 3733 break 3734 } 3735 x := v_0.AuxInt 
3736 if !(x == y) { 3737 break 3738 } 3739 v.reset(OpARM64FlagEQ) 3740 return true 3741 } 3742 // match: (CMPconst (MOVDconst [x]) [y]) 3743 // cond: x<y && uint64(x)<uint64(y) 3744 // result: (FlagLT_ULT) 3745 for { 3746 y := v.AuxInt 3747 v_0 := v.Args[0] 3748 if v_0.Op != OpARM64MOVDconst { 3749 break 3750 } 3751 x := v_0.AuxInt 3752 if !(x < y && uint64(x) < uint64(y)) { 3753 break 3754 } 3755 v.reset(OpARM64FlagLT_ULT) 3756 return true 3757 } 3758 // match: (CMPconst (MOVDconst [x]) [y]) 3759 // cond: x<y && uint64(x)>uint64(y) 3760 // result: (FlagLT_UGT) 3761 for { 3762 y := v.AuxInt 3763 v_0 := v.Args[0] 3764 if v_0.Op != OpARM64MOVDconst { 3765 break 3766 } 3767 x := v_0.AuxInt 3768 if !(x < y && uint64(x) > uint64(y)) { 3769 break 3770 } 3771 v.reset(OpARM64FlagLT_UGT) 3772 return true 3773 } 3774 // match: (CMPconst (MOVDconst [x]) [y]) 3775 // cond: x>y && uint64(x)<uint64(y) 3776 // result: (FlagGT_ULT) 3777 for { 3778 y := v.AuxInt 3779 v_0 := v.Args[0] 3780 if v_0.Op != OpARM64MOVDconst { 3781 break 3782 } 3783 x := v_0.AuxInt 3784 if !(x > y && uint64(x) < uint64(y)) { 3785 break 3786 } 3787 v.reset(OpARM64FlagGT_ULT) 3788 return true 3789 } 3790 // match: (CMPconst (MOVDconst [x]) [y]) 3791 // cond: x>y && uint64(x)>uint64(y) 3792 // result: (FlagGT_UGT) 3793 for { 3794 y := v.AuxInt 3795 v_0 := v.Args[0] 3796 if v_0.Op != OpARM64MOVDconst { 3797 break 3798 } 3799 x := v_0.AuxInt 3800 if !(x > y && uint64(x) > uint64(y)) { 3801 break 3802 } 3803 v.reset(OpARM64FlagGT_UGT) 3804 return true 3805 } 3806 // match: (CMPconst (MOVBUreg _) [c]) 3807 // cond: 0xff < c 3808 // result: (FlagLT_ULT) 3809 for { 3810 c := v.AuxInt 3811 v_0 := v.Args[0] 3812 if v_0.Op != OpARM64MOVBUreg || !(0xff < c) { 3813 break 3814 } 3815 v.reset(OpARM64FlagLT_ULT) 3816 return true 3817 } 3818 // match: (CMPconst (MOVHUreg _) [c]) 3819 // cond: 0xffff < c 3820 // result: (FlagLT_ULT) 3821 for { 3822 c := v.AuxInt 3823 v_0 := v.Args[0] 3824 if v_0.Op != OpARM64MOVHUreg || 
!(0xffff < c) { 3825 break 3826 } 3827 v.reset(OpARM64FlagLT_ULT) 3828 return true 3829 } 3830 // match: (CMPconst (MOVWUreg _) [c]) 3831 // cond: 0xffffffff < c 3832 // result: (FlagLT_ULT) 3833 for { 3834 c := v.AuxInt 3835 v_0 := v.Args[0] 3836 if v_0.Op != OpARM64MOVWUreg || !(0xffffffff < c) { 3837 break 3838 } 3839 v.reset(OpARM64FlagLT_ULT) 3840 return true 3841 } 3842 // match: (CMPconst (ANDconst _ [m]) [n]) 3843 // cond: 0 <= m && m < n 3844 // result: (FlagLT_ULT) 3845 for { 3846 n := v.AuxInt 3847 v_0 := v.Args[0] 3848 if v_0.Op != OpARM64ANDconst { 3849 break 3850 } 3851 m := v_0.AuxInt 3852 if !(0 <= m && m < n) { 3853 break 3854 } 3855 v.reset(OpARM64FlagLT_ULT) 3856 return true 3857 } 3858 // match: (CMPconst (SRLconst _ [c]) [n]) 3859 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 3860 // result: (FlagLT_ULT) 3861 for { 3862 n := v.AuxInt 3863 v_0 := v.Args[0] 3864 if v_0.Op != OpARM64SRLconst { 3865 break 3866 } 3867 c := v_0.AuxInt 3868 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 3869 break 3870 } 3871 v.reset(OpARM64FlagLT_ULT) 3872 return true 3873 } 3874 return false 3875 } 3876 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 3877 b := v.Block 3878 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 3879 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 3880 for { 3881 d := v.AuxInt 3882 x := v.Args[1] 3883 v_0 := v.Args[0] 3884 if v_0.Op != OpARM64MOVDconst { 3885 break 3886 } 3887 c := v_0.AuxInt 3888 v.reset(OpARM64InvertFlags) 3889 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3890 v0.AuxInt = c 3891 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 3892 v1.AuxInt = d 3893 v1.AddArg(x) 3894 v0.AddArg(v1) 3895 v.AddArg(v0) 3896 return true 3897 } 3898 // match: (CMPshiftLL x (MOVDconst [c]) [d]) 3899 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 3900 for { 3901 d := v.AuxInt 3902 _ = v.Args[1] 3903 x := v.Args[0] 3904 v_1 := v.Args[1] 3905 if v_1.Op != 
OpARM64MOVDconst { 3906 break 3907 } 3908 c := v_1.AuxInt 3909 v.reset(OpARM64CMPconst) 3910 v.AuxInt = int64(uint64(c) << uint64(d)) 3911 v.AddArg(x) 3912 return true 3913 } 3914 return false 3915 } 3916 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 3917 b := v.Block 3918 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 3919 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 3920 for { 3921 d := v.AuxInt 3922 x := v.Args[1] 3923 v_0 := v.Args[0] 3924 if v_0.Op != OpARM64MOVDconst { 3925 break 3926 } 3927 c := v_0.AuxInt 3928 v.reset(OpARM64InvertFlags) 3929 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3930 v0.AuxInt = c 3931 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 3932 v1.AuxInt = d 3933 v1.AddArg(x) 3934 v0.AddArg(v1) 3935 v.AddArg(v0) 3936 return true 3937 } 3938 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 3939 // result: (CMPconst x [c>>uint64(d)]) 3940 for { 3941 d := v.AuxInt 3942 _ = v.Args[1] 3943 x := v.Args[0] 3944 v_1 := v.Args[1] 3945 if v_1.Op != OpARM64MOVDconst { 3946 break 3947 } 3948 c := v_1.AuxInt 3949 v.reset(OpARM64CMPconst) 3950 v.AuxInt = c >> uint64(d) 3951 v.AddArg(x) 3952 return true 3953 } 3954 return false 3955 } 3956 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 3957 b := v.Block 3958 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 3959 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 3960 for { 3961 d := v.AuxInt 3962 x := v.Args[1] 3963 v_0 := v.Args[0] 3964 if v_0.Op != OpARM64MOVDconst { 3965 break 3966 } 3967 c := v_0.AuxInt 3968 v.reset(OpARM64InvertFlags) 3969 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3970 v0.AuxInt = c 3971 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3972 v1.AuxInt = d 3973 v1.AddArg(x) 3974 v0.AddArg(v1) 3975 v.AddArg(v0) 3976 return true 3977 } 3978 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 3979 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 3980 for { 3981 d := v.AuxInt 3982 _ = v.Args[1] 3983 x := 
v.Args[0] 3984 v_1 := v.Args[1] 3985 if v_1.Op != OpARM64MOVDconst { 3986 break 3987 } 3988 c := v_1.AuxInt 3989 v.reset(OpARM64CMPconst) 3990 v.AuxInt = int64(uint64(c) >> uint64(d)) 3991 v.AddArg(x) 3992 return true 3993 } 3994 return false 3995 } 3996 func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool { 3997 // match: (CSEL {cc} x (MOVDconst [0]) flag) 3998 // result: (CSEL0 {cc} x flag) 3999 for { 4000 cc := v.Aux 4001 flag := v.Args[2] 4002 x := v.Args[0] 4003 v_1 := v.Args[1] 4004 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 4005 break 4006 } 4007 v.reset(OpARM64CSEL0) 4008 v.Aux = cc 4009 v.AddArg(x) 4010 v.AddArg(flag) 4011 return true 4012 } 4013 // match: (CSEL {cc} (MOVDconst [0]) y flag) 4014 // result: (CSEL0 {arm64Negate(cc.(Op))} y flag) 4015 for { 4016 cc := v.Aux 4017 flag := v.Args[2] 4018 v_0 := v.Args[0] 4019 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 0 { 4020 break 4021 } 4022 y := v.Args[1] 4023 v.reset(OpARM64CSEL0) 4024 v.Aux = arm64Negate(cc.(Op)) 4025 v.AddArg(y) 4026 v.AddArg(flag) 4027 return true 4028 } 4029 // match: (CSEL {cc} x y (InvertFlags cmp)) 4030 // result: (CSEL {arm64Invert(cc.(Op))} x y cmp) 4031 for { 4032 cc := v.Aux 4033 _ = v.Args[2] 4034 x := v.Args[0] 4035 y := v.Args[1] 4036 v_2 := v.Args[2] 4037 if v_2.Op != OpARM64InvertFlags { 4038 break 4039 } 4040 cmp := v_2.Args[0] 4041 v.reset(OpARM64CSEL) 4042 v.Aux = arm64Invert(cc.(Op)) 4043 v.AddArg(x) 4044 v.AddArg(y) 4045 v.AddArg(cmp) 4046 return true 4047 } 4048 // match: (CSEL {cc} x _ flag) 4049 // cond: ccARM64Eval(cc, flag) > 0 4050 // result: x 4051 for { 4052 cc := v.Aux 4053 flag := v.Args[2] 4054 x := v.Args[0] 4055 if !(ccARM64Eval(cc, flag) > 0) { 4056 break 4057 } 4058 v.reset(OpCopy) 4059 v.Type = x.Type 4060 v.AddArg(x) 4061 return true 4062 } 4063 // match: (CSEL {cc} _ y flag) 4064 // cond: ccARM64Eval(cc, flag) < 0 4065 // result: y 4066 for { 4067 cc := v.Aux 4068 flag := v.Args[2] 4069 y := v.Args[1] 4070 if !(ccARM64Eval(cc, flag) < 0) { 
4071 break 4072 } 4073 v.reset(OpCopy) 4074 v.Type = y.Type 4075 v.AddArg(y) 4076 return true 4077 } 4078 // match: (CSEL {cc} x y (CMPWconst [0] boolval)) 4079 // cond: cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil 4080 // result: (CSEL {boolval.Op} x y flagArg(boolval)) 4081 for { 4082 cc := v.Aux 4083 _ = v.Args[2] 4084 x := v.Args[0] 4085 y := v.Args[1] 4086 v_2 := v.Args[2] 4087 if v_2.Op != OpARM64CMPWconst || v_2.AuxInt != 0 { 4088 break 4089 } 4090 boolval := v_2.Args[0] 4091 if !(cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil) { 4092 break 4093 } 4094 v.reset(OpARM64CSEL) 4095 v.Aux = boolval.Op 4096 v.AddArg(x) 4097 v.AddArg(y) 4098 v.AddArg(flagArg(boolval)) 4099 return true 4100 } 4101 // match: (CSEL {cc} x y (CMPWconst [0] boolval)) 4102 // cond: cc.(Op) == OpARM64Equal && flagArg(boolval) != nil 4103 // result: (CSEL {arm64Negate(boolval.Op)} x y flagArg(boolval)) 4104 for { 4105 cc := v.Aux 4106 _ = v.Args[2] 4107 x := v.Args[0] 4108 y := v.Args[1] 4109 v_2 := v.Args[2] 4110 if v_2.Op != OpARM64CMPWconst || v_2.AuxInt != 0 { 4111 break 4112 } 4113 boolval := v_2.Args[0] 4114 if !(cc.(Op) == OpARM64Equal && flagArg(boolval) != nil) { 4115 break 4116 } 4117 v.reset(OpARM64CSEL) 4118 v.Aux = arm64Negate(boolval.Op) 4119 v.AddArg(x) 4120 v.AddArg(y) 4121 v.AddArg(flagArg(boolval)) 4122 return true 4123 } 4124 return false 4125 } 4126 func rewriteValueARM64_OpARM64CSEL0_0(v *Value) bool { 4127 // match: (CSEL0 {cc} x (InvertFlags cmp)) 4128 // result: (CSEL0 {arm64Invert(cc.(Op))} x cmp) 4129 for { 4130 cc := v.Aux 4131 _ = v.Args[1] 4132 x := v.Args[0] 4133 v_1 := v.Args[1] 4134 if v_1.Op != OpARM64InvertFlags { 4135 break 4136 } 4137 cmp := v_1.Args[0] 4138 v.reset(OpARM64CSEL0) 4139 v.Aux = arm64Invert(cc.(Op)) 4140 v.AddArg(x) 4141 v.AddArg(cmp) 4142 return true 4143 } 4144 // match: (CSEL0 {cc} x flag) 4145 // cond: ccARM64Eval(cc, flag) > 0 4146 // result: x 4147 for { 4148 cc := v.Aux 4149 flag := v.Args[1] 4150 x := v.Args[0] 4151 
if !(ccARM64Eval(cc, flag) > 0) { 4152 break 4153 } 4154 v.reset(OpCopy) 4155 v.Type = x.Type 4156 v.AddArg(x) 4157 return true 4158 } 4159 // match: (CSEL0 {cc} _ flag) 4160 // cond: ccARM64Eval(cc, flag) < 0 4161 // result: (MOVDconst [0]) 4162 for { 4163 cc := v.Aux 4164 flag := v.Args[1] 4165 if !(ccARM64Eval(cc, flag) < 0) { 4166 break 4167 } 4168 v.reset(OpARM64MOVDconst) 4169 v.AuxInt = 0 4170 return true 4171 } 4172 // match: (CSEL0 {cc} x (CMPWconst [0] boolval)) 4173 // cond: cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil 4174 // result: (CSEL0 {boolval.Op} x flagArg(boolval)) 4175 for { 4176 cc := v.Aux 4177 _ = v.Args[1] 4178 x := v.Args[0] 4179 v_1 := v.Args[1] 4180 if v_1.Op != OpARM64CMPWconst || v_1.AuxInt != 0 { 4181 break 4182 } 4183 boolval := v_1.Args[0] 4184 if !(cc.(Op) == OpARM64NotEqual && flagArg(boolval) != nil) { 4185 break 4186 } 4187 v.reset(OpARM64CSEL0) 4188 v.Aux = boolval.Op 4189 v.AddArg(x) 4190 v.AddArg(flagArg(boolval)) 4191 return true 4192 } 4193 // match: (CSEL0 {cc} x (CMPWconst [0] boolval)) 4194 // cond: cc.(Op) == OpARM64Equal && flagArg(boolval) != nil 4195 // result: (CSEL0 {arm64Negate(boolval.Op)} x flagArg(boolval)) 4196 for { 4197 cc := v.Aux 4198 _ = v.Args[1] 4199 x := v.Args[0] 4200 v_1 := v.Args[1] 4201 if v_1.Op != OpARM64CMPWconst || v_1.AuxInt != 0 { 4202 break 4203 } 4204 boolval := v_1.Args[0] 4205 if !(cc.(Op) == OpARM64Equal && flagArg(boolval) != nil) { 4206 break 4207 } 4208 v.reset(OpARM64CSEL0) 4209 v.Aux = arm64Negate(boolval.Op) 4210 v.AddArg(x) 4211 v.AddArg(flagArg(boolval)) 4212 return true 4213 } 4214 return false 4215 } 4216 func rewriteValueARM64_OpARM64DIV_0(v *Value) bool { 4217 // match: (DIV (MOVDconst [c]) (MOVDconst [d])) 4218 // result: (MOVDconst [c/d]) 4219 for { 4220 _ = v.Args[1] 4221 v_0 := v.Args[0] 4222 if v_0.Op != OpARM64MOVDconst { 4223 break 4224 } 4225 c := v_0.AuxInt 4226 v_1 := v.Args[1] 4227 if v_1.Op != OpARM64MOVDconst { 4228 break 4229 } 4230 d := v_1.AuxInt 4231 
v.reset(OpARM64MOVDconst) 4232 v.AuxInt = c / d 4233 return true 4234 } 4235 return false 4236 } 4237 func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool { 4238 // match: (DIVW (MOVDconst [c]) (MOVDconst [d])) 4239 // result: (MOVDconst [int64(int32(c)/int32(d))]) 4240 for { 4241 _ = v.Args[1] 4242 v_0 := v.Args[0] 4243 if v_0.Op != OpARM64MOVDconst { 4244 break 4245 } 4246 c := v_0.AuxInt 4247 v_1 := v.Args[1] 4248 if v_1.Op != OpARM64MOVDconst { 4249 break 4250 } 4251 d := v_1.AuxInt 4252 v.reset(OpARM64MOVDconst) 4253 v.AuxInt = int64(int32(c) / int32(d)) 4254 return true 4255 } 4256 return false 4257 } 4258 func rewriteValueARM64_OpARM64EON_0(v *Value) bool { 4259 // match: (EON x (MOVDconst [c])) 4260 // result: (XORconst [^c] x) 4261 for { 4262 _ = v.Args[1] 4263 x := v.Args[0] 4264 v_1 := v.Args[1] 4265 if v_1.Op != OpARM64MOVDconst { 4266 break 4267 } 4268 c := v_1.AuxInt 4269 v.reset(OpARM64XORconst) 4270 v.AuxInt = ^c 4271 v.AddArg(x) 4272 return true 4273 } 4274 // match: (EON x x) 4275 // result: (MOVDconst [-1]) 4276 for { 4277 x := v.Args[1] 4278 if x != v.Args[0] { 4279 break 4280 } 4281 v.reset(OpARM64MOVDconst) 4282 v.AuxInt = -1 4283 return true 4284 } 4285 // match: (EON x0 x1:(SLLconst [c] y)) 4286 // cond: clobberIfDead(x1) 4287 // result: (EONshiftLL x0 y [c]) 4288 for { 4289 _ = v.Args[1] 4290 x0 := v.Args[0] 4291 x1 := v.Args[1] 4292 if x1.Op != OpARM64SLLconst { 4293 break 4294 } 4295 c := x1.AuxInt 4296 y := x1.Args[0] 4297 if !(clobberIfDead(x1)) { 4298 break 4299 } 4300 v.reset(OpARM64EONshiftLL) 4301 v.AuxInt = c 4302 v.AddArg(x0) 4303 v.AddArg(y) 4304 return true 4305 } 4306 // match: (EON x0 x1:(SRLconst [c] y)) 4307 // cond: clobberIfDead(x1) 4308 // result: (EONshiftRL x0 y [c]) 4309 for { 4310 _ = v.Args[1] 4311 x0 := v.Args[0] 4312 x1 := v.Args[1] 4313 if x1.Op != OpARM64SRLconst { 4314 break 4315 } 4316 c := x1.AuxInt 4317 y := x1.Args[0] 4318 if !(clobberIfDead(x1)) { 4319 break 4320 } 4321 v.reset(OpARM64EONshiftRL) 4322 
v.AuxInt = c 4323 v.AddArg(x0) 4324 v.AddArg(y) 4325 return true 4326 } 4327 // match: (EON x0 x1:(SRAconst [c] y)) 4328 // cond: clobberIfDead(x1) 4329 // result: (EONshiftRA x0 y [c]) 4330 for { 4331 _ = v.Args[1] 4332 x0 := v.Args[0] 4333 x1 := v.Args[1] 4334 if x1.Op != OpARM64SRAconst { 4335 break 4336 } 4337 c := x1.AuxInt 4338 y := x1.Args[0] 4339 if !(clobberIfDead(x1)) { 4340 break 4341 } 4342 v.reset(OpARM64EONshiftRA) 4343 v.AuxInt = c 4344 v.AddArg(x0) 4345 v.AddArg(y) 4346 return true 4347 } 4348 return false 4349 } 4350 func rewriteValueARM64_OpARM64EONshiftLL_0(v *Value) bool { 4351 // match: (EONshiftLL x (MOVDconst [c]) [d]) 4352 // result: (XORconst x [^int64(uint64(c)<<uint64(d))]) 4353 for { 4354 d := v.AuxInt 4355 _ = v.Args[1] 4356 x := v.Args[0] 4357 v_1 := v.Args[1] 4358 if v_1.Op != OpARM64MOVDconst { 4359 break 4360 } 4361 c := v_1.AuxInt 4362 v.reset(OpARM64XORconst) 4363 v.AuxInt = ^int64(uint64(c) << uint64(d)) 4364 v.AddArg(x) 4365 return true 4366 } 4367 // match: (EONshiftLL x (SLLconst x [c]) [d]) 4368 // cond: c==d 4369 // result: (MOVDconst [-1]) 4370 for { 4371 d := v.AuxInt 4372 _ = v.Args[1] 4373 x := v.Args[0] 4374 v_1 := v.Args[1] 4375 if v_1.Op != OpARM64SLLconst { 4376 break 4377 } 4378 c := v_1.AuxInt 4379 if x != v_1.Args[0] || !(c == d) { 4380 break 4381 } 4382 v.reset(OpARM64MOVDconst) 4383 v.AuxInt = -1 4384 return true 4385 } 4386 return false 4387 } 4388 func rewriteValueARM64_OpARM64EONshiftRA_0(v *Value) bool { 4389 // match: (EONshiftRA x (MOVDconst [c]) [d]) 4390 // result: (XORconst x [^(c>>uint64(d))]) 4391 for { 4392 d := v.AuxInt 4393 _ = v.Args[1] 4394 x := v.Args[0] 4395 v_1 := v.Args[1] 4396 if v_1.Op != OpARM64MOVDconst { 4397 break 4398 } 4399 c := v_1.AuxInt 4400 v.reset(OpARM64XORconst) 4401 v.AuxInt = ^(c >> uint64(d)) 4402 v.AddArg(x) 4403 return true 4404 } 4405 // match: (EONshiftRA x (SRAconst x [c]) [d]) 4406 // cond: c==d 4407 // result: (MOVDconst [-1]) 4408 for { 4409 d := v.AuxInt 4410 _ = 
v.Args[1] 4411 x := v.Args[0] 4412 v_1 := v.Args[1] 4413 if v_1.Op != OpARM64SRAconst { 4414 break 4415 } 4416 c := v_1.AuxInt 4417 if x != v_1.Args[0] || !(c == d) { 4418 break 4419 } 4420 v.reset(OpARM64MOVDconst) 4421 v.AuxInt = -1 4422 return true 4423 } 4424 return false 4425 } 4426 func rewriteValueARM64_OpARM64EONshiftRL_0(v *Value) bool { 4427 // match: (EONshiftRL x (MOVDconst [c]) [d]) 4428 // result: (XORconst x [^int64(uint64(c)>>uint64(d))]) 4429 for { 4430 d := v.AuxInt 4431 _ = v.Args[1] 4432 x := v.Args[0] 4433 v_1 := v.Args[1] 4434 if v_1.Op != OpARM64MOVDconst { 4435 break 4436 } 4437 c := v_1.AuxInt 4438 v.reset(OpARM64XORconst) 4439 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 4440 v.AddArg(x) 4441 return true 4442 } 4443 // match: (EONshiftRL x (SRLconst x [c]) [d]) 4444 // cond: c==d 4445 // result: (MOVDconst [-1]) 4446 for { 4447 d := v.AuxInt 4448 _ = v.Args[1] 4449 x := v.Args[0] 4450 v_1 := v.Args[1] 4451 if v_1.Op != OpARM64SRLconst { 4452 break 4453 } 4454 c := v_1.AuxInt 4455 if x != v_1.Args[0] || !(c == d) { 4456 break 4457 } 4458 v.reset(OpARM64MOVDconst) 4459 v.AuxInt = -1 4460 return true 4461 } 4462 return false 4463 } 4464 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 4465 // match: (Equal (FlagEQ)) 4466 // result: (MOVDconst [1]) 4467 for { 4468 v_0 := v.Args[0] 4469 if v_0.Op != OpARM64FlagEQ { 4470 break 4471 } 4472 v.reset(OpARM64MOVDconst) 4473 v.AuxInt = 1 4474 return true 4475 } 4476 // match: (Equal (FlagLT_ULT)) 4477 // result: (MOVDconst [0]) 4478 for { 4479 v_0 := v.Args[0] 4480 if v_0.Op != OpARM64FlagLT_ULT { 4481 break 4482 } 4483 v.reset(OpARM64MOVDconst) 4484 v.AuxInt = 0 4485 return true 4486 } 4487 // match: (Equal (FlagLT_UGT)) 4488 // result: (MOVDconst [0]) 4489 for { 4490 v_0 := v.Args[0] 4491 if v_0.Op != OpARM64FlagLT_UGT { 4492 break 4493 } 4494 v.reset(OpARM64MOVDconst) 4495 v.AuxInt = 0 4496 return true 4497 } 4498 // match: (Equal (FlagGT_ULT)) 4499 // result: (MOVDconst [0]) 4500 for { 4501 v_0 
:= v.Args[0] 4502 if v_0.Op != OpARM64FlagGT_ULT { 4503 break 4504 } 4505 v.reset(OpARM64MOVDconst) 4506 v.AuxInt = 0 4507 return true 4508 } 4509 // match: (Equal (FlagGT_UGT)) 4510 // result: (MOVDconst [0]) 4511 for { 4512 v_0 := v.Args[0] 4513 if v_0.Op != OpARM64FlagGT_UGT { 4514 break 4515 } 4516 v.reset(OpARM64MOVDconst) 4517 v.AuxInt = 0 4518 return true 4519 } 4520 // match: (Equal (InvertFlags x)) 4521 // result: (Equal x) 4522 for { 4523 v_0 := v.Args[0] 4524 if v_0.Op != OpARM64InvertFlags { 4525 break 4526 } 4527 x := v_0.Args[0] 4528 v.reset(OpARM64Equal) 4529 v.AddArg(x) 4530 return true 4531 } 4532 return false 4533 } 4534 func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool { 4535 // match: (FADDD a (FMULD x y)) 4536 // result: (FMADDD a x y) 4537 for { 4538 _ = v.Args[1] 4539 a := v.Args[0] 4540 v_1 := v.Args[1] 4541 if v_1.Op != OpARM64FMULD { 4542 break 4543 } 4544 y := v_1.Args[1] 4545 x := v_1.Args[0] 4546 v.reset(OpARM64FMADDD) 4547 v.AddArg(a) 4548 v.AddArg(x) 4549 v.AddArg(y) 4550 return true 4551 } 4552 // match: (FADDD (FMULD x y) a) 4553 // result: (FMADDD a x y) 4554 for { 4555 a := v.Args[1] 4556 v_0 := v.Args[0] 4557 if v_0.Op != OpARM64FMULD { 4558 break 4559 } 4560 y := v_0.Args[1] 4561 x := v_0.Args[0] 4562 v.reset(OpARM64FMADDD) 4563 v.AddArg(a) 4564 v.AddArg(x) 4565 v.AddArg(y) 4566 return true 4567 } 4568 // match: (FADDD a (FNMULD x y)) 4569 // result: (FMSUBD a x y) 4570 for { 4571 _ = v.Args[1] 4572 a := v.Args[0] 4573 v_1 := v.Args[1] 4574 if v_1.Op != OpARM64FNMULD { 4575 break 4576 } 4577 y := v_1.Args[1] 4578 x := v_1.Args[0] 4579 v.reset(OpARM64FMSUBD) 4580 v.AddArg(a) 4581 v.AddArg(x) 4582 v.AddArg(y) 4583 return true 4584 } 4585 // match: (FADDD (FNMULD x y) a) 4586 // result: (FMSUBD a x y) 4587 for { 4588 a := v.Args[1] 4589 v_0 := v.Args[0] 4590 if v_0.Op != OpARM64FNMULD { 4591 break 4592 } 4593 y := v_0.Args[1] 4594 x := v_0.Args[0] 4595 v.reset(OpARM64FMSUBD) 4596 v.AddArg(a) 4597 v.AddArg(x) 4598 v.AddArg(y) 
4599 return true 4600 } 4601 return false 4602 } 4603 func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool { 4604 // match: (FADDS a (FMULS x y)) 4605 // result: (FMADDS a x y) 4606 for { 4607 _ = v.Args[1] 4608 a := v.Args[0] 4609 v_1 := v.Args[1] 4610 if v_1.Op != OpARM64FMULS { 4611 break 4612 } 4613 y := v_1.Args[1] 4614 x := v_1.Args[0] 4615 v.reset(OpARM64FMADDS) 4616 v.AddArg(a) 4617 v.AddArg(x) 4618 v.AddArg(y) 4619 return true 4620 } 4621 // match: (FADDS (FMULS x y) a) 4622 // result: (FMADDS a x y) 4623 for { 4624 a := v.Args[1] 4625 v_0 := v.Args[0] 4626 if v_0.Op != OpARM64FMULS { 4627 break 4628 } 4629 y := v_0.Args[1] 4630 x := v_0.Args[0] 4631 v.reset(OpARM64FMADDS) 4632 v.AddArg(a) 4633 v.AddArg(x) 4634 v.AddArg(y) 4635 return true 4636 } 4637 // match: (FADDS a (FNMULS x y)) 4638 // result: (FMSUBS a x y) 4639 for { 4640 _ = v.Args[1] 4641 a := v.Args[0] 4642 v_1 := v.Args[1] 4643 if v_1.Op != OpARM64FNMULS { 4644 break 4645 } 4646 y := v_1.Args[1] 4647 x := v_1.Args[0] 4648 v.reset(OpARM64FMSUBS) 4649 v.AddArg(a) 4650 v.AddArg(x) 4651 v.AddArg(y) 4652 return true 4653 } 4654 // match: (FADDS (FNMULS x y) a) 4655 // result: (FMSUBS a x y) 4656 for { 4657 a := v.Args[1] 4658 v_0 := v.Args[0] 4659 if v_0.Op != OpARM64FNMULS { 4660 break 4661 } 4662 y := v_0.Args[1] 4663 x := v_0.Args[0] 4664 v.reset(OpARM64FMSUBS) 4665 v.AddArg(a) 4666 v.AddArg(x) 4667 v.AddArg(y) 4668 return true 4669 } 4670 return false 4671 } 4672 func rewriteValueARM64_OpARM64FCMPD_0(v *Value) bool { 4673 b := v.Block 4674 // match: (FCMPD x (FMOVDconst [0])) 4675 // result: (FCMPD0 x) 4676 for { 4677 _ = v.Args[1] 4678 x := v.Args[0] 4679 v_1 := v.Args[1] 4680 if v_1.Op != OpARM64FMOVDconst || v_1.AuxInt != 0 { 4681 break 4682 } 4683 v.reset(OpARM64FCMPD0) 4684 v.AddArg(x) 4685 return true 4686 } 4687 // match: (FCMPD (FMOVDconst [0]) x) 4688 // result: (InvertFlags (FCMPD0 x)) 4689 for { 4690 x := v.Args[1] 4691 v_0 := v.Args[0] 4692 if v_0.Op != OpARM64FMOVDconst || 
v_0.AuxInt != 0 { 4693 break 4694 } 4695 v.reset(OpARM64InvertFlags) 4696 v0 := b.NewValue0(v.Pos, OpARM64FCMPD0, types.TypeFlags) 4697 v0.AddArg(x) 4698 v.AddArg(v0) 4699 return true 4700 } 4701 return false 4702 } 4703 func rewriteValueARM64_OpARM64FCMPS_0(v *Value) bool { 4704 b := v.Block 4705 // match: (FCMPS x (FMOVSconst [0])) 4706 // result: (FCMPS0 x) 4707 for { 4708 _ = v.Args[1] 4709 x := v.Args[0] 4710 v_1 := v.Args[1] 4711 if v_1.Op != OpARM64FMOVSconst || v_1.AuxInt != 0 { 4712 break 4713 } 4714 v.reset(OpARM64FCMPS0) 4715 v.AddArg(x) 4716 return true 4717 } 4718 // match: (FCMPS (FMOVSconst [0]) x) 4719 // result: (InvertFlags (FCMPS0 x)) 4720 for { 4721 x := v.Args[1] 4722 v_0 := v.Args[0] 4723 if v_0.Op != OpARM64FMOVSconst || v_0.AuxInt != 0 { 4724 break 4725 } 4726 v.reset(OpARM64InvertFlags) 4727 v0 := b.NewValue0(v.Pos, OpARM64FCMPS0, types.TypeFlags) 4728 v0.AddArg(x) 4729 v.AddArg(v0) 4730 return true 4731 } 4732 return false 4733 } 4734 func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool { 4735 b := v.Block 4736 // match: (FMOVDfpgp <t> (Arg [off] {sym})) 4737 // result: @b.Func.Entry (Arg <t> [off] {sym}) 4738 for { 4739 t := v.Type 4740 v_0 := v.Args[0] 4741 if v_0.Op != OpArg { 4742 break 4743 } 4744 off := v_0.AuxInt 4745 sym := v_0.Aux 4746 b = b.Func.Entry 4747 v0 := b.NewValue0(v.Pos, OpArg, t) 4748 v.reset(OpCopy) 4749 v.AddArg(v0) 4750 v0.AuxInt = off 4751 v0.Aux = sym 4752 return true 4753 } 4754 return false 4755 } 4756 func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool { 4757 b := v.Block 4758 // match: (FMOVDgpfp <t> (Arg [off] {sym})) 4759 // result: @b.Func.Entry (Arg <t> [off] {sym}) 4760 for { 4761 t := v.Type 4762 v_0 := v.Args[0] 4763 if v_0.Op != OpArg { 4764 break 4765 } 4766 off := v_0.AuxInt 4767 sym := v_0.Aux 4768 b = b.Func.Entry 4769 v0 := b.NewValue0(v.Pos, OpArg, t) 4770 v.reset(OpCopy) 4771 v.AddArg(v0) 4772 v0.AuxInt = off 4773 v0.Aux = sym 4774 return true 4775 } 4776 return false 4777 } 4778 func 
rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool { 4779 b := v.Block 4780 config := b.Func.Config 4781 // match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _)) 4782 // result: (FMOVDgpfp val) 4783 for { 4784 off := v.AuxInt 4785 sym := v.Aux 4786 _ = v.Args[1] 4787 ptr := v.Args[0] 4788 v_1 := v.Args[1] 4789 if v_1.Op != OpARM64MOVDstore || v_1.AuxInt != off || v_1.Aux != sym { 4790 break 4791 } 4792 _ = v_1.Args[2] 4793 if ptr != v_1.Args[0] { 4794 break 4795 } 4796 val := v_1.Args[1] 4797 v.reset(OpARM64FMOVDgpfp) 4798 v.AddArg(val) 4799 return true 4800 } 4801 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 4802 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4803 // result: (FMOVDload [off1+off2] {sym} ptr mem) 4804 for { 4805 off1 := v.AuxInt 4806 sym := v.Aux 4807 mem := v.Args[1] 4808 v_0 := v.Args[0] 4809 if v_0.Op != OpARM64ADDconst { 4810 break 4811 } 4812 off2 := v_0.AuxInt 4813 ptr := v_0.Args[0] 4814 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4815 break 4816 } 4817 v.reset(OpARM64FMOVDload) 4818 v.AuxInt = off1 + off2 4819 v.Aux = sym 4820 v.AddArg(ptr) 4821 v.AddArg(mem) 4822 return true 4823 } 4824 // match: (FMOVDload [off] {sym} (ADD ptr idx) mem) 4825 // cond: off == 0 && sym == nil 4826 // result: (FMOVDloadidx ptr idx mem) 4827 for { 4828 off := v.AuxInt 4829 sym := v.Aux 4830 mem := v.Args[1] 4831 v_0 := v.Args[0] 4832 if v_0.Op != OpARM64ADD { 4833 break 4834 } 4835 idx := v_0.Args[1] 4836 ptr := v_0.Args[0] 4837 if !(off == 0 && sym == nil) { 4838 break 4839 } 4840 v.reset(OpARM64FMOVDloadidx) 4841 v.AddArg(ptr) 4842 v.AddArg(idx) 4843 v.AddArg(mem) 4844 return true 4845 } 4846 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 4847 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4848 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4849 for { 4850 off1 := v.AuxInt 
4851 sym1 := v.Aux 4852 mem := v.Args[1] 4853 v_0 := v.Args[0] 4854 if v_0.Op != OpARM64MOVDaddr { 4855 break 4856 } 4857 off2 := v_0.AuxInt 4858 sym2 := v_0.Aux 4859 ptr := v_0.Args[0] 4860 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4861 break 4862 } 4863 v.reset(OpARM64FMOVDload) 4864 v.AuxInt = off1 + off2 4865 v.Aux = mergeSym(sym1, sym2) 4866 v.AddArg(ptr) 4867 v.AddArg(mem) 4868 return true 4869 } 4870 return false 4871 } 4872 func rewriteValueARM64_OpARM64FMOVDloadidx_0(v *Value) bool { 4873 // match: (FMOVDloadidx ptr (MOVDconst [c]) mem) 4874 // result: (FMOVDload [c] ptr mem) 4875 for { 4876 mem := v.Args[2] 4877 ptr := v.Args[0] 4878 v_1 := v.Args[1] 4879 if v_1.Op != OpARM64MOVDconst { 4880 break 4881 } 4882 c := v_1.AuxInt 4883 v.reset(OpARM64FMOVDload) 4884 v.AuxInt = c 4885 v.AddArg(ptr) 4886 v.AddArg(mem) 4887 return true 4888 } 4889 // match: (FMOVDloadidx (MOVDconst [c]) ptr mem) 4890 // result: (FMOVDload [c] ptr mem) 4891 for { 4892 mem := v.Args[2] 4893 v_0 := v.Args[0] 4894 if v_0.Op != OpARM64MOVDconst { 4895 break 4896 } 4897 c := v_0.AuxInt 4898 ptr := v.Args[1] 4899 v.reset(OpARM64FMOVDload) 4900 v.AuxInt = c 4901 v.AddArg(ptr) 4902 v.AddArg(mem) 4903 return true 4904 } 4905 return false 4906 } 4907 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 4908 b := v.Block 4909 config := b.Func.Config 4910 // match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem) 4911 // result: (MOVDstore [off] {sym} ptr val mem) 4912 for { 4913 off := v.AuxInt 4914 sym := v.Aux 4915 mem := v.Args[2] 4916 ptr := v.Args[0] 4917 v_1 := v.Args[1] 4918 if v_1.Op != OpARM64FMOVDgpfp { 4919 break 4920 } 4921 val := v_1.Args[0] 4922 v.reset(OpARM64MOVDstore) 4923 v.AuxInt = off 4924 v.Aux = sym 4925 v.AddArg(ptr) 4926 v.AddArg(val) 4927 v.AddArg(mem) 4928 return true 4929 } 4930 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 4931 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || 
!config.ctxt.Flag_shared) 4932 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 4933 for { 4934 off1 := v.AuxInt 4935 sym := v.Aux 4936 mem := v.Args[2] 4937 v_0 := v.Args[0] 4938 if v_0.Op != OpARM64ADDconst { 4939 break 4940 } 4941 off2 := v_0.AuxInt 4942 ptr := v_0.Args[0] 4943 val := v.Args[1] 4944 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4945 break 4946 } 4947 v.reset(OpARM64FMOVDstore) 4948 v.AuxInt = off1 + off2 4949 v.Aux = sym 4950 v.AddArg(ptr) 4951 v.AddArg(val) 4952 v.AddArg(mem) 4953 return true 4954 } 4955 // match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem) 4956 // cond: off == 0 && sym == nil 4957 // result: (FMOVDstoreidx ptr idx val mem) 4958 for { 4959 off := v.AuxInt 4960 sym := v.Aux 4961 mem := v.Args[2] 4962 v_0 := v.Args[0] 4963 if v_0.Op != OpARM64ADD { 4964 break 4965 } 4966 idx := v_0.Args[1] 4967 ptr := v_0.Args[0] 4968 val := v.Args[1] 4969 if !(off == 0 && sym == nil) { 4970 break 4971 } 4972 v.reset(OpARM64FMOVDstoreidx) 4973 v.AddArg(ptr) 4974 v.AddArg(idx) 4975 v.AddArg(val) 4976 v.AddArg(mem) 4977 return true 4978 } 4979 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 4980 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 4981 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4982 for { 4983 off1 := v.AuxInt 4984 sym1 := v.Aux 4985 mem := v.Args[2] 4986 v_0 := v.Args[0] 4987 if v_0.Op != OpARM64MOVDaddr { 4988 break 4989 } 4990 off2 := v_0.AuxInt 4991 sym2 := v_0.Aux 4992 ptr := v_0.Args[0] 4993 val := v.Args[1] 4994 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 4995 break 4996 } 4997 v.reset(OpARM64FMOVDstore) 4998 v.AuxInt = off1 + off2 4999 v.Aux = mergeSym(sym1, sym2) 5000 v.AddArg(ptr) 5001 v.AddArg(val) 5002 v.AddArg(mem) 5003 return true 5004 } 5005 return false 5006 } 5007 func rewriteValueARM64_OpARM64FMOVDstoreidx_0(v *Value) 
bool { 5008 // match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem) 5009 // result: (FMOVDstore [c] ptr val mem) 5010 for { 5011 mem := v.Args[3] 5012 ptr := v.Args[0] 5013 v_1 := v.Args[1] 5014 if v_1.Op != OpARM64MOVDconst { 5015 break 5016 } 5017 c := v_1.AuxInt 5018 val := v.Args[2] 5019 v.reset(OpARM64FMOVDstore) 5020 v.AuxInt = c 5021 v.AddArg(ptr) 5022 v.AddArg(val) 5023 v.AddArg(mem) 5024 return true 5025 } 5026 // match: (FMOVDstoreidx (MOVDconst [c]) idx val mem) 5027 // result: (FMOVDstore [c] idx val mem) 5028 for { 5029 mem := v.Args[3] 5030 v_0 := v.Args[0] 5031 if v_0.Op != OpARM64MOVDconst { 5032 break 5033 } 5034 c := v_0.AuxInt 5035 idx := v.Args[1] 5036 val := v.Args[2] 5037 v.reset(OpARM64FMOVDstore) 5038 v.AuxInt = c 5039 v.AddArg(idx) 5040 v.AddArg(val) 5041 v.AddArg(mem) 5042 return true 5043 } 5044 return false 5045 } 5046 func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool { 5047 b := v.Block 5048 config := b.Func.Config 5049 // match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _)) 5050 // result: (FMOVSgpfp val) 5051 for { 5052 off := v.AuxInt 5053 sym := v.Aux 5054 _ = v.Args[1] 5055 ptr := v.Args[0] 5056 v_1 := v.Args[1] 5057 if v_1.Op != OpARM64MOVWstore || v_1.AuxInt != off || v_1.Aux != sym { 5058 break 5059 } 5060 _ = v_1.Args[2] 5061 if ptr != v_1.Args[0] { 5062 break 5063 } 5064 val := v_1.Args[1] 5065 v.reset(OpARM64FMOVSgpfp) 5066 v.AddArg(val) 5067 return true 5068 } 5069 // match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem) 5070 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5071 // result: (FMOVSload [off1+off2] {sym} ptr mem) 5072 for { 5073 off1 := v.AuxInt 5074 sym := v.Aux 5075 mem := v.Args[1] 5076 v_0 := v.Args[0] 5077 if v_0.Op != OpARM64ADDconst { 5078 break 5079 } 5080 off2 := v_0.AuxInt 5081 ptr := v_0.Args[0] 5082 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5083 break 5084 } 5085 v.reset(OpARM64FMOVSload) 5086 v.AuxInt = off1 + off2 
5087 v.Aux = sym 5088 v.AddArg(ptr) 5089 v.AddArg(mem) 5090 return true 5091 } 5092 // match: (FMOVSload [off] {sym} (ADD ptr idx) mem) 5093 // cond: off == 0 && sym == nil 5094 // result: (FMOVSloadidx ptr idx mem) 5095 for { 5096 off := v.AuxInt 5097 sym := v.Aux 5098 mem := v.Args[1] 5099 v_0 := v.Args[0] 5100 if v_0.Op != OpARM64ADD { 5101 break 5102 } 5103 idx := v_0.Args[1] 5104 ptr := v_0.Args[0] 5105 if !(off == 0 && sym == nil) { 5106 break 5107 } 5108 v.reset(OpARM64FMOVSloadidx) 5109 v.AddArg(ptr) 5110 v.AddArg(idx) 5111 v.AddArg(mem) 5112 return true 5113 } 5114 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5115 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5116 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5117 for { 5118 off1 := v.AuxInt 5119 sym1 := v.Aux 5120 mem := v.Args[1] 5121 v_0 := v.Args[0] 5122 if v_0.Op != OpARM64MOVDaddr { 5123 break 5124 } 5125 off2 := v_0.AuxInt 5126 sym2 := v_0.Aux 5127 ptr := v_0.Args[0] 5128 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5129 break 5130 } 5131 v.reset(OpARM64FMOVSload) 5132 v.AuxInt = off1 + off2 5133 v.Aux = mergeSym(sym1, sym2) 5134 v.AddArg(ptr) 5135 v.AddArg(mem) 5136 return true 5137 } 5138 return false 5139 } 5140 func rewriteValueARM64_OpARM64FMOVSloadidx_0(v *Value) bool { 5141 // match: (FMOVSloadidx ptr (MOVDconst [c]) mem) 5142 // result: (FMOVSload [c] ptr mem) 5143 for { 5144 mem := v.Args[2] 5145 ptr := v.Args[0] 5146 v_1 := v.Args[1] 5147 if v_1.Op != OpARM64MOVDconst { 5148 break 5149 } 5150 c := v_1.AuxInt 5151 v.reset(OpARM64FMOVSload) 5152 v.AuxInt = c 5153 v.AddArg(ptr) 5154 v.AddArg(mem) 5155 return true 5156 } 5157 // match: (FMOVSloadidx (MOVDconst [c]) ptr mem) 5158 // result: (FMOVSload [c] ptr mem) 5159 for { 5160 mem := v.Args[2] 5161 v_0 := v.Args[0] 5162 if v_0.Op != OpARM64MOVDconst { 5163 break 5164 } 5165 c := 
v_0.AuxInt 5166 ptr := v.Args[1] 5167 v.reset(OpARM64FMOVSload) 5168 v.AuxInt = c 5169 v.AddArg(ptr) 5170 v.AddArg(mem) 5171 return true 5172 } 5173 return false 5174 } 5175 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 5176 b := v.Block 5177 config := b.Func.Config 5178 // match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem) 5179 // result: (MOVWstore [off] {sym} ptr val mem) 5180 for { 5181 off := v.AuxInt 5182 sym := v.Aux 5183 mem := v.Args[2] 5184 ptr := v.Args[0] 5185 v_1 := v.Args[1] 5186 if v_1.Op != OpARM64FMOVSgpfp { 5187 break 5188 } 5189 val := v_1.Args[0] 5190 v.reset(OpARM64MOVWstore) 5191 v.AuxInt = off 5192 v.Aux = sym 5193 v.AddArg(ptr) 5194 v.AddArg(val) 5195 v.AddArg(mem) 5196 return true 5197 } 5198 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5199 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5200 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 5201 for { 5202 off1 := v.AuxInt 5203 sym := v.Aux 5204 mem := v.Args[2] 5205 v_0 := v.Args[0] 5206 if v_0.Op != OpARM64ADDconst { 5207 break 5208 } 5209 off2 := v_0.AuxInt 5210 ptr := v_0.Args[0] 5211 val := v.Args[1] 5212 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5213 break 5214 } 5215 v.reset(OpARM64FMOVSstore) 5216 v.AuxInt = off1 + off2 5217 v.Aux = sym 5218 v.AddArg(ptr) 5219 v.AddArg(val) 5220 v.AddArg(mem) 5221 return true 5222 } 5223 // match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem) 5224 // cond: off == 0 && sym == nil 5225 // result: (FMOVSstoreidx ptr idx val mem) 5226 for { 5227 off := v.AuxInt 5228 sym := v.Aux 5229 mem := v.Args[2] 5230 v_0 := v.Args[0] 5231 if v_0.Op != OpARM64ADD { 5232 break 5233 } 5234 idx := v_0.Args[1] 5235 ptr := v_0.Args[0] 5236 val := v.Args[1] 5237 if !(off == 0 && sym == nil) { 5238 break 5239 } 5240 v.reset(OpARM64FMOVSstoreidx) 5241 v.AddArg(ptr) 5242 v.AddArg(idx) 5243 v.AddArg(val) 5244 v.AddArg(mem) 5245 return true 5246 } 5247 // match: 
(FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5248 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5249 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5250 for { 5251 off1 := v.AuxInt 5252 sym1 := v.Aux 5253 mem := v.Args[2] 5254 v_0 := v.Args[0] 5255 if v_0.Op != OpARM64MOVDaddr { 5256 break 5257 } 5258 off2 := v_0.AuxInt 5259 sym2 := v_0.Aux 5260 ptr := v_0.Args[0] 5261 val := v.Args[1] 5262 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5263 break 5264 } 5265 v.reset(OpARM64FMOVSstore) 5266 v.AuxInt = off1 + off2 5267 v.Aux = mergeSym(sym1, sym2) 5268 v.AddArg(ptr) 5269 v.AddArg(val) 5270 v.AddArg(mem) 5271 return true 5272 } 5273 return false 5274 } 5275 func rewriteValueARM64_OpARM64FMOVSstoreidx_0(v *Value) bool { 5276 // match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem) 5277 // result: (FMOVSstore [c] ptr val mem) 5278 for { 5279 mem := v.Args[3] 5280 ptr := v.Args[0] 5281 v_1 := v.Args[1] 5282 if v_1.Op != OpARM64MOVDconst { 5283 break 5284 } 5285 c := v_1.AuxInt 5286 val := v.Args[2] 5287 v.reset(OpARM64FMOVSstore) 5288 v.AuxInt = c 5289 v.AddArg(ptr) 5290 v.AddArg(val) 5291 v.AddArg(mem) 5292 return true 5293 } 5294 // match: (FMOVSstoreidx (MOVDconst [c]) idx val mem) 5295 // result: (FMOVSstore [c] idx val mem) 5296 for { 5297 mem := v.Args[3] 5298 v_0 := v.Args[0] 5299 if v_0.Op != OpARM64MOVDconst { 5300 break 5301 } 5302 c := v_0.AuxInt 5303 idx := v.Args[1] 5304 val := v.Args[2] 5305 v.reset(OpARM64FMOVSstore) 5306 v.AuxInt = c 5307 v.AddArg(idx) 5308 v.AddArg(val) 5309 v.AddArg(mem) 5310 return true 5311 } 5312 return false 5313 } 5314 func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool { 5315 // match: (FMULD (FNEGD x) y) 5316 // result: (FNMULD x y) 5317 for { 5318 y := v.Args[1] 5319 v_0 := v.Args[0] 5320 if v_0.Op != OpARM64FNEGD { 5321 break 5322 } 5323 x := v_0.Args[0] 5324 
v.reset(OpARM64FNMULD) 5325 v.AddArg(x) 5326 v.AddArg(y) 5327 return true 5328 } 5329 // match: (FMULD y (FNEGD x)) 5330 // result: (FNMULD x y) 5331 for { 5332 _ = v.Args[1] 5333 y := v.Args[0] 5334 v_1 := v.Args[1] 5335 if v_1.Op != OpARM64FNEGD { 5336 break 5337 } 5338 x := v_1.Args[0] 5339 v.reset(OpARM64FNMULD) 5340 v.AddArg(x) 5341 v.AddArg(y) 5342 return true 5343 } 5344 return false 5345 } 5346 func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool { 5347 // match: (FMULS (FNEGS x) y) 5348 // result: (FNMULS x y) 5349 for { 5350 y := v.Args[1] 5351 v_0 := v.Args[0] 5352 if v_0.Op != OpARM64FNEGS { 5353 break 5354 } 5355 x := v_0.Args[0] 5356 v.reset(OpARM64FNMULS) 5357 v.AddArg(x) 5358 v.AddArg(y) 5359 return true 5360 } 5361 // match: (FMULS y (FNEGS x)) 5362 // result: (FNMULS x y) 5363 for { 5364 _ = v.Args[1] 5365 y := v.Args[0] 5366 v_1 := v.Args[1] 5367 if v_1.Op != OpARM64FNEGS { 5368 break 5369 } 5370 x := v_1.Args[0] 5371 v.reset(OpARM64FNMULS) 5372 v.AddArg(x) 5373 v.AddArg(y) 5374 return true 5375 } 5376 return false 5377 } 5378 func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool { 5379 // match: (FNEGD (FMULD x y)) 5380 // result: (FNMULD x y) 5381 for { 5382 v_0 := v.Args[0] 5383 if v_0.Op != OpARM64FMULD { 5384 break 5385 } 5386 y := v_0.Args[1] 5387 x := v_0.Args[0] 5388 v.reset(OpARM64FNMULD) 5389 v.AddArg(x) 5390 v.AddArg(y) 5391 return true 5392 } 5393 // match: (FNEGD (FNMULD x y)) 5394 // result: (FMULD x y) 5395 for { 5396 v_0 := v.Args[0] 5397 if v_0.Op != OpARM64FNMULD { 5398 break 5399 } 5400 y := v_0.Args[1] 5401 x := v_0.Args[0] 5402 v.reset(OpARM64FMULD) 5403 v.AddArg(x) 5404 v.AddArg(y) 5405 return true 5406 } 5407 return false 5408 } 5409 func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool { 5410 // match: (FNEGS (FMULS x y)) 5411 // result: (FNMULS x y) 5412 for { 5413 v_0 := v.Args[0] 5414 if v_0.Op != OpARM64FMULS { 5415 break 5416 } 5417 y := v_0.Args[1] 5418 x := v_0.Args[0] 5419 v.reset(OpARM64FNMULS) 5420 v.AddArg(x) 5421 
v.AddArg(y) 5422 return true 5423 } 5424 // match: (FNEGS (FNMULS x y)) 5425 // result: (FMULS x y) 5426 for { 5427 v_0 := v.Args[0] 5428 if v_0.Op != OpARM64FNMULS { 5429 break 5430 } 5431 y := v_0.Args[1] 5432 x := v_0.Args[0] 5433 v.reset(OpARM64FMULS) 5434 v.AddArg(x) 5435 v.AddArg(y) 5436 return true 5437 } 5438 return false 5439 } 5440 func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool { 5441 // match: (FNMULD (FNEGD x) y) 5442 // result: (FMULD x y) 5443 for { 5444 y := v.Args[1] 5445 v_0 := v.Args[0] 5446 if v_0.Op != OpARM64FNEGD { 5447 break 5448 } 5449 x := v_0.Args[0] 5450 v.reset(OpARM64FMULD) 5451 v.AddArg(x) 5452 v.AddArg(y) 5453 return true 5454 } 5455 // match: (FNMULD y (FNEGD x)) 5456 // result: (FMULD x y) 5457 for { 5458 _ = v.Args[1] 5459 y := v.Args[0] 5460 v_1 := v.Args[1] 5461 if v_1.Op != OpARM64FNEGD { 5462 break 5463 } 5464 x := v_1.Args[0] 5465 v.reset(OpARM64FMULD) 5466 v.AddArg(x) 5467 v.AddArg(y) 5468 return true 5469 } 5470 return false 5471 } 5472 func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool { 5473 // match: (FNMULS (FNEGS x) y) 5474 // result: (FMULS x y) 5475 for { 5476 y := v.Args[1] 5477 v_0 := v.Args[0] 5478 if v_0.Op != OpARM64FNEGS { 5479 break 5480 } 5481 x := v_0.Args[0] 5482 v.reset(OpARM64FMULS) 5483 v.AddArg(x) 5484 v.AddArg(y) 5485 return true 5486 } 5487 // match: (FNMULS y (FNEGS x)) 5488 // result: (FMULS x y) 5489 for { 5490 _ = v.Args[1] 5491 y := v.Args[0] 5492 v_1 := v.Args[1] 5493 if v_1.Op != OpARM64FNEGS { 5494 break 5495 } 5496 x := v_1.Args[0] 5497 v.reset(OpARM64FMULS) 5498 v.AddArg(x) 5499 v.AddArg(y) 5500 return true 5501 } 5502 return false 5503 } 5504 func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool { 5505 // match: (FSUBD a (FMULD x y)) 5506 // result: (FMSUBD a x y) 5507 for { 5508 _ = v.Args[1] 5509 a := v.Args[0] 5510 v_1 := v.Args[1] 5511 if v_1.Op != OpARM64FMULD { 5512 break 5513 } 5514 y := v_1.Args[1] 5515 x := v_1.Args[0] 5516 v.reset(OpARM64FMSUBD) 5517 v.AddArg(a) 5518 
v.AddArg(x) 5519 v.AddArg(y) 5520 return true 5521 } 5522 // match: (FSUBD (FMULD x y) a) 5523 // result: (FNMSUBD a x y) 5524 for { 5525 a := v.Args[1] 5526 v_0 := v.Args[0] 5527 if v_0.Op != OpARM64FMULD { 5528 break 5529 } 5530 y := v_0.Args[1] 5531 x := v_0.Args[0] 5532 v.reset(OpARM64FNMSUBD) 5533 v.AddArg(a) 5534 v.AddArg(x) 5535 v.AddArg(y) 5536 return true 5537 } 5538 // match: (FSUBD a (FNMULD x y)) 5539 // result: (FMADDD a x y) 5540 for { 5541 _ = v.Args[1] 5542 a := v.Args[0] 5543 v_1 := v.Args[1] 5544 if v_1.Op != OpARM64FNMULD { 5545 break 5546 } 5547 y := v_1.Args[1] 5548 x := v_1.Args[0] 5549 v.reset(OpARM64FMADDD) 5550 v.AddArg(a) 5551 v.AddArg(x) 5552 v.AddArg(y) 5553 return true 5554 } 5555 // match: (FSUBD (FNMULD x y) a) 5556 // result: (FNMADDD a x y) 5557 for { 5558 a := v.Args[1] 5559 v_0 := v.Args[0] 5560 if v_0.Op != OpARM64FNMULD { 5561 break 5562 } 5563 y := v_0.Args[1] 5564 x := v_0.Args[0] 5565 v.reset(OpARM64FNMADDD) 5566 v.AddArg(a) 5567 v.AddArg(x) 5568 v.AddArg(y) 5569 return true 5570 } 5571 return false 5572 } 5573 func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool { 5574 // match: (FSUBS a (FMULS x y)) 5575 // result: (FMSUBS a x y) 5576 for { 5577 _ = v.Args[1] 5578 a := v.Args[0] 5579 v_1 := v.Args[1] 5580 if v_1.Op != OpARM64FMULS { 5581 break 5582 } 5583 y := v_1.Args[1] 5584 x := v_1.Args[0] 5585 v.reset(OpARM64FMSUBS) 5586 v.AddArg(a) 5587 v.AddArg(x) 5588 v.AddArg(y) 5589 return true 5590 } 5591 // match: (FSUBS (FMULS x y) a) 5592 // result: (FNMSUBS a x y) 5593 for { 5594 a := v.Args[1] 5595 v_0 := v.Args[0] 5596 if v_0.Op != OpARM64FMULS { 5597 break 5598 } 5599 y := v_0.Args[1] 5600 x := v_0.Args[0] 5601 v.reset(OpARM64FNMSUBS) 5602 v.AddArg(a) 5603 v.AddArg(x) 5604 v.AddArg(y) 5605 return true 5606 } 5607 // match: (FSUBS a (FNMULS x y)) 5608 // result: (FMADDS a x y) 5609 for { 5610 _ = v.Args[1] 5611 a := v.Args[0] 5612 v_1 := v.Args[1] 5613 if v_1.Op != OpARM64FNMULS { 5614 break 5615 } 5616 y := v_1.Args[1] 
5617 x := v_1.Args[0] 5618 v.reset(OpARM64FMADDS) 5619 v.AddArg(a) 5620 v.AddArg(x) 5621 v.AddArg(y) 5622 return true 5623 } 5624 // match: (FSUBS (FNMULS x y) a) 5625 // result: (FNMADDS a x y) 5626 for { 5627 a := v.Args[1] 5628 v_0 := v.Args[0] 5629 if v_0.Op != OpARM64FNMULS { 5630 break 5631 } 5632 y := v_0.Args[1] 5633 x := v_0.Args[0] 5634 v.reset(OpARM64FNMADDS) 5635 v.AddArg(a) 5636 v.AddArg(x) 5637 v.AddArg(y) 5638 return true 5639 } 5640 return false 5641 } 5642 func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool { 5643 // match: (GreaterEqual (FlagEQ)) 5644 // result: (MOVDconst [1]) 5645 for { 5646 v_0 := v.Args[0] 5647 if v_0.Op != OpARM64FlagEQ { 5648 break 5649 } 5650 v.reset(OpARM64MOVDconst) 5651 v.AuxInt = 1 5652 return true 5653 } 5654 // match: (GreaterEqual (FlagLT_ULT)) 5655 // result: (MOVDconst [0]) 5656 for { 5657 v_0 := v.Args[0] 5658 if v_0.Op != OpARM64FlagLT_ULT { 5659 break 5660 } 5661 v.reset(OpARM64MOVDconst) 5662 v.AuxInt = 0 5663 return true 5664 } 5665 // match: (GreaterEqual (FlagLT_UGT)) 5666 // result: (MOVDconst [0]) 5667 for { 5668 v_0 := v.Args[0] 5669 if v_0.Op != OpARM64FlagLT_UGT { 5670 break 5671 } 5672 v.reset(OpARM64MOVDconst) 5673 v.AuxInt = 0 5674 return true 5675 } 5676 // match: (GreaterEqual (FlagGT_ULT)) 5677 // result: (MOVDconst [1]) 5678 for { 5679 v_0 := v.Args[0] 5680 if v_0.Op != OpARM64FlagGT_ULT { 5681 break 5682 } 5683 v.reset(OpARM64MOVDconst) 5684 v.AuxInt = 1 5685 return true 5686 } 5687 // match: (GreaterEqual (FlagGT_UGT)) 5688 // result: (MOVDconst [1]) 5689 for { 5690 v_0 := v.Args[0] 5691 if v_0.Op != OpARM64FlagGT_UGT { 5692 break 5693 } 5694 v.reset(OpARM64MOVDconst) 5695 v.AuxInt = 1 5696 return true 5697 } 5698 // match: (GreaterEqual (InvertFlags x)) 5699 // result: (LessEqual x) 5700 for { 5701 v_0 := v.Args[0] 5702 if v_0.Op != OpARM64InvertFlags { 5703 break 5704 } 5705 x := v_0.Args[0] 5706 v.reset(OpARM64LessEqual) 5707 v.AddArg(x) 5708 return true 5709 } 5710 return false 5711 
} 5712 func rewriteValueARM64_OpARM64GreaterEqualF_0(v *Value) bool { 5713 // match: (GreaterEqualF (InvertFlags x)) 5714 // result: (LessEqualF x) 5715 for { 5716 v_0 := v.Args[0] 5717 if v_0.Op != OpARM64InvertFlags { 5718 break 5719 } 5720 x := v_0.Args[0] 5721 v.reset(OpARM64LessEqualF) 5722 v.AddArg(x) 5723 return true 5724 } 5725 return false 5726 } 5727 func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool { 5728 // match: (GreaterEqualU (FlagEQ)) 5729 // result: (MOVDconst [1]) 5730 for { 5731 v_0 := v.Args[0] 5732 if v_0.Op != OpARM64FlagEQ { 5733 break 5734 } 5735 v.reset(OpARM64MOVDconst) 5736 v.AuxInt = 1 5737 return true 5738 } 5739 // match: (GreaterEqualU (FlagLT_ULT)) 5740 // result: (MOVDconst [0]) 5741 for { 5742 v_0 := v.Args[0] 5743 if v_0.Op != OpARM64FlagLT_ULT { 5744 break 5745 } 5746 v.reset(OpARM64MOVDconst) 5747 v.AuxInt = 0 5748 return true 5749 } 5750 // match: (GreaterEqualU (FlagLT_UGT)) 5751 // result: (MOVDconst [1]) 5752 for { 5753 v_0 := v.Args[0] 5754 if v_0.Op != OpARM64FlagLT_UGT { 5755 break 5756 } 5757 v.reset(OpARM64MOVDconst) 5758 v.AuxInt = 1 5759 return true 5760 } 5761 // match: (GreaterEqualU (FlagGT_ULT)) 5762 // result: (MOVDconst [0]) 5763 for { 5764 v_0 := v.Args[0] 5765 if v_0.Op != OpARM64FlagGT_ULT { 5766 break 5767 } 5768 v.reset(OpARM64MOVDconst) 5769 v.AuxInt = 0 5770 return true 5771 } 5772 // match: (GreaterEqualU (FlagGT_UGT)) 5773 // result: (MOVDconst [1]) 5774 for { 5775 v_0 := v.Args[0] 5776 if v_0.Op != OpARM64FlagGT_UGT { 5777 break 5778 } 5779 v.reset(OpARM64MOVDconst) 5780 v.AuxInt = 1 5781 return true 5782 } 5783 // match: (GreaterEqualU (InvertFlags x)) 5784 // result: (LessEqualU x) 5785 for { 5786 v_0 := v.Args[0] 5787 if v_0.Op != OpARM64InvertFlags { 5788 break 5789 } 5790 x := v_0.Args[0] 5791 v.reset(OpARM64LessEqualU) 5792 v.AddArg(x) 5793 return true 5794 } 5795 return false 5796 } 5797 func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool { 5798 // match: (GreaterThan (FlagEQ)) 
5799 // result: (MOVDconst [0]) 5800 for { 5801 v_0 := v.Args[0] 5802 if v_0.Op != OpARM64FlagEQ { 5803 break 5804 } 5805 v.reset(OpARM64MOVDconst) 5806 v.AuxInt = 0 5807 return true 5808 } 5809 // match: (GreaterThan (FlagLT_ULT)) 5810 // result: (MOVDconst [0]) 5811 for { 5812 v_0 := v.Args[0] 5813 if v_0.Op != OpARM64FlagLT_ULT { 5814 break 5815 } 5816 v.reset(OpARM64MOVDconst) 5817 v.AuxInt = 0 5818 return true 5819 } 5820 // match: (GreaterThan (FlagLT_UGT)) 5821 // result: (MOVDconst [0]) 5822 for { 5823 v_0 := v.Args[0] 5824 if v_0.Op != OpARM64FlagLT_UGT { 5825 break 5826 } 5827 v.reset(OpARM64MOVDconst) 5828 v.AuxInt = 0 5829 return true 5830 } 5831 // match: (GreaterThan (FlagGT_ULT)) 5832 // result: (MOVDconst [1]) 5833 for { 5834 v_0 := v.Args[0] 5835 if v_0.Op != OpARM64FlagGT_ULT { 5836 break 5837 } 5838 v.reset(OpARM64MOVDconst) 5839 v.AuxInt = 1 5840 return true 5841 } 5842 // match: (GreaterThan (FlagGT_UGT)) 5843 // result: (MOVDconst [1]) 5844 for { 5845 v_0 := v.Args[0] 5846 if v_0.Op != OpARM64FlagGT_UGT { 5847 break 5848 } 5849 v.reset(OpARM64MOVDconst) 5850 v.AuxInt = 1 5851 return true 5852 } 5853 // match: (GreaterThan (InvertFlags x)) 5854 // result: (LessThan x) 5855 for { 5856 v_0 := v.Args[0] 5857 if v_0.Op != OpARM64InvertFlags { 5858 break 5859 } 5860 x := v_0.Args[0] 5861 v.reset(OpARM64LessThan) 5862 v.AddArg(x) 5863 return true 5864 } 5865 return false 5866 } 5867 func rewriteValueARM64_OpARM64GreaterThanF_0(v *Value) bool { 5868 // match: (GreaterThanF (InvertFlags x)) 5869 // result: (LessThanF x) 5870 for { 5871 v_0 := v.Args[0] 5872 if v_0.Op != OpARM64InvertFlags { 5873 break 5874 } 5875 x := v_0.Args[0] 5876 v.reset(OpARM64LessThanF) 5877 v.AddArg(x) 5878 return true 5879 } 5880 return false 5881 } 5882 func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool { 5883 // match: (GreaterThanU (FlagEQ)) 5884 // result: (MOVDconst [0]) 5885 for { 5886 v_0 := v.Args[0] 5887 if v_0.Op != OpARM64FlagEQ { 5888 break 5889 } 5890 
v.reset(OpARM64MOVDconst) 5891 v.AuxInt = 0 5892 return true 5893 } 5894 // match: (GreaterThanU (FlagLT_ULT)) 5895 // result: (MOVDconst [0]) 5896 for { 5897 v_0 := v.Args[0] 5898 if v_0.Op != OpARM64FlagLT_ULT { 5899 break 5900 } 5901 v.reset(OpARM64MOVDconst) 5902 v.AuxInt = 0 5903 return true 5904 } 5905 // match: (GreaterThanU (FlagLT_UGT)) 5906 // result: (MOVDconst [1]) 5907 for { 5908 v_0 := v.Args[0] 5909 if v_0.Op != OpARM64FlagLT_UGT { 5910 break 5911 } 5912 v.reset(OpARM64MOVDconst) 5913 v.AuxInt = 1 5914 return true 5915 } 5916 // match: (GreaterThanU (FlagGT_ULT)) 5917 // result: (MOVDconst [0]) 5918 for { 5919 v_0 := v.Args[0] 5920 if v_0.Op != OpARM64FlagGT_ULT { 5921 break 5922 } 5923 v.reset(OpARM64MOVDconst) 5924 v.AuxInt = 0 5925 return true 5926 } 5927 // match: (GreaterThanU (FlagGT_UGT)) 5928 // result: (MOVDconst [1]) 5929 for { 5930 v_0 := v.Args[0] 5931 if v_0.Op != OpARM64FlagGT_UGT { 5932 break 5933 } 5934 v.reset(OpARM64MOVDconst) 5935 v.AuxInt = 1 5936 return true 5937 } 5938 // match: (GreaterThanU (InvertFlags x)) 5939 // result: (LessThanU x) 5940 for { 5941 v_0 := v.Args[0] 5942 if v_0.Op != OpARM64InvertFlags { 5943 break 5944 } 5945 x := v_0.Args[0] 5946 v.reset(OpARM64LessThanU) 5947 v.AddArg(x) 5948 return true 5949 } 5950 return false 5951 } 5952 func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool { 5953 // match: (LessEqual (FlagEQ)) 5954 // result: (MOVDconst [1]) 5955 for { 5956 v_0 := v.Args[0] 5957 if v_0.Op != OpARM64FlagEQ { 5958 break 5959 } 5960 v.reset(OpARM64MOVDconst) 5961 v.AuxInt = 1 5962 return true 5963 } 5964 // match: (LessEqual (FlagLT_ULT)) 5965 // result: (MOVDconst [1]) 5966 for { 5967 v_0 := v.Args[0] 5968 if v_0.Op != OpARM64FlagLT_ULT { 5969 break 5970 } 5971 v.reset(OpARM64MOVDconst) 5972 v.AuxInt = 1 5973 return true 5974 } 5975 // match: (LessEqual (FlagLT_UGT)) 5976 // result: (MOVDconst [1]) 5977 for { 5978 v_0 := v.Args[0] 5979 if v_0.Op != OpARM64FlagLT_UGT { 5980 break 5981 } 5982 
v.reset(OpARM64MOVDconst) 5983 v.AuxInt = 1 5984 return true 5985 } 5986 // match: (LessEqual (FlagGT_ULT)) 5987 // result: (MOVDconst [0]) 5988 for { 5989 v_0 := v.Args[0] 5990 if v_0.Op != OpARM64FlagGT_ULT { 5991 break 5992 } 5993 v.reset(OpARM64MOVDconst) 5994 v.AuxInt = 0 5995 return true 5996 } 5997 // match: (LessEqual (FlagGT_UGT)) 5998 // result: (MOVDconst [0]) 5999 for { 6000 v_0 := v.Args[0] 6001 if v_0.Op != OpARM64FlagGT_UGT { 6002 break 6003 } 6004 v.reset(OpARM64MOVDconst) 6005 v.AuxInt = 0 6006 return true 6007 } 6008 // match: (LessEqual (InvertFlags x)) 6009 // result: (GreaterEqual x) 6010 for { 6011 v_0 := v.Args[0] 6012 if v_0.Op != OpARM64InvertFlags { 6013 break 6014 } 6015 x := v_0.Args[0] 6016 v.reset(OpARM64GreaterEqual) 6017 v.AddArg(x) 6018 return true 6019 } 6020 return false 6021 } 6022 func rewriteValueARM64_OpARM64LessEqualF_0(v *Value) bool { 6023 // match: (LessEqualF (InvertFlags x)) 6024 // result: (GreaterEqualF x) 6025 for { 6026 v_0 := v.Args[0] 6027 if v_0.Op != OpARM64InvertFlags { 6028 break 6029 } 6030 x := v_0.Args[0] 6031 v.reset(OpARM64GreaterEqualF) 6032 v.AddArg(x) 6033 return true 6034 } 6035 return false 6036 } 6037 func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool { 6038 // match: (LessEqualU (FlagEQ)) 6039 // result: (MOVDconst [1]) 6040 for { 6041 v_0 := v.Args[0] 6042 if v_0.Op != OpARM64FlagEQ { 6043 break 6044 } 6045 v.reset(OpARM64MOVDconst) 6046 v.AuxInt = 1 6047 return true 6048 } 6049 // match: (LessEqualU (FlagLT_ULT)) 6050 // result: (MOVDconst [1]) 6051 for { 6052 v_0 := v.Args[0] 6053 if v_0.Op != OpARM64FlagLT_ULT { 6054 break 6055 } 6056 v.reset(OpARM64MOVDconst) 6057 v.AuxInt = 1 6058 return true 6059 } 6060 // match: (LessEqualU (FlagLT_UGT)) 6061 // result: (MOVDconst [0]) 6062 for { 6063 v_0 := v.Args[0] 6064 if v_0.Op != OpARM64FlagLT_UGT { 6065 break 6066 } 6067 v.reset(OpARM64MOVDconst) 6068 v.AuxInt = 0 6069 return true 6070 } 6071 // match: (LessEqualU (FlagGT_ULT)) 6072 // result: 
(MOVDconst [1]) 6073 for { 6074 v_0 := v.Args[0] 6075 if v_0.Op != OpARM64FlagGT_ULT { 6076 break 6077 } 6078 v.reset(OpARM64MOVDconst) 6079 v.AuxInt = 1 6080 return true 6081 } 6082 // match: (LessEqualU (FlagGT_UGT)) 6083 // result: (MOVDconst [0]) 6084 for { 6085 v_0 := v.Args[0] 6086 if v_0.Op != OpARM64FlagGT_UGT { 6087 break 6088 } 6089 v.reset(OpARM64MOVDconst) 6090 v.AuxInt = 0 6091 return true 6092 } 6093 // match: (LessEqualU (InvertFlags x)) 6094 // result: (GreaterEqualU x) 6095 for { 6096 v_0 := v.Args[0] 6097 if v_0.Op != OpARM64InvertFlags { 6098 break 6099 } 6100 x := v_0.Args[0] 6101 v.reset(OpARM64GreaterEqualU) 6102 v.AddArg(x) 6103 return true 6104 } 6105 return false 6106 } 6107 func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool { 6108 // match: (LessThan (FlagEQ)) 6109 // result: (MOVDconst [0]) 6110 for { 6111 v_0 := v.Args[0] 6112 if v_0.Op != OpARM64FlagEQ { 6113 break 6114 } 6115 v.reset(OpARM64MOVDconst) 6116 v.AuxInt = 0 6117 return true 6118 } 6119 // match: (LessThan (FlagLT_ULT)) 6120 // result: (MOVDconst [1]) 6121 for { 6122 v_0 := v.Args[0] 6123 if v_0.Op != OpARM64FlagLT_ULT { 6124 break 6125 } 6126 v.reset(OpARM64MOVDconst) 6127 v.AuxInt = 1 6128 return true 6129 } 6130 // match: (LessThan (FlagLT_UGT)) 6131 // result: (MOVDconst [1]) 6132 for { 6133 v_0 := v.Args[0] 6134 if v_0.Op != OpARM64FlagLT_UGT { 6135 break 6136 } 6137 v.reset(OpARM64MOVDconst) 6138 v.AuxInt = 1 6139 return true 6140 } 6141 // match: (LessThan (FlagGT_ULT)) 6142 // result: (MOVDconst [0]) 6143 for { 6144 v_0 := v.Args[0] 6145 if v_0.Op != OpARM64FlagGT_ULT { 6146 break 6147 } 6148 v.reset(OpARM64MOVDconst) 6149 v.AuxInt = 0 6150 return true 6151 } 6152 // match: (LessThan (FlagGT_UGT)) 6153 // result: (MOVDconst [0]) 6154 for { 6155 v_0 := v.Args[0] 6156 if v_0.Op != OpARM64FlagGT_UGT { 6157 break 6158 } 6159 v.reset(OpARM64MOVDconst) 6160 v.AuxInt = 0 6161 return true 6162 } 6163 // match: (LessThan (InvertFlags x)) 6164 // result: (GreaterThan x) 
6165 for { 6166 v_0 := v.Args[0] 6167 if v_0.Op != OpARM64InvertFlags { 6168 break 6169 } 6170 x := v_0.Args[0] 6171 v.reset(OpARM64GreaterThan) 6172 v.AddArg(x) 6173 return true 6174 } 6175 return false 6176 } 6177 func rewriteValueARM64_OpARM64LessThanF_0(v *Value) bool { 6178 // match: (LessThanF (InvertFlags x)) 6179 // result: (GreaterThanF x) 6180 for { 6181 v_0 := v.Args[0] 6182 if v_0.Op != OpARM64InvertFlags { 6183 break 6184 } 6185 x := v_0.Args[0] 6186 v.reset(OpARM64GreaterThanF) 6187 v.AddArg(x) 6188 return true 6189 } 6190 return false 6191 } 6192 func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool { 6193 // match: (LessThanU (FlagEQ)) 6194 // result: (MOVDconst [0]) 6195 for { 6196 v_0 := v.Args[0] 6197 if v_0.Op != OpARM64FlagEQ { 6198 break 6199 } 6200 v.reset(OpARM64MOVDconst) 6201 v.AuxInt = 0 6202 return true 6203 } 6204 // match: (LessThanU (FlagLT_ULT)) 6205 // result: (MOVDconst [1]) 6206 for { 6207 v_0 := v.Args[0] 6208 if v_0.Op != OpARM64FlagLT_ULT { 6209 break 6210 } 6211 v.reset(OpARM64MOVDconst) 6212 v.AuxInt = 1 6213 return true 6214 } 6215 // match: (LessThanU (FlagLT_UGT)) 6216 // result: (MOVDconst [0]) 6217 for { 6218 v_0 := v.Args[0] 6219 if v_0.Op != OpARM64FlagLT_UGT { 6220 break 6221 } 6222 v.reset(OpARM64MOVDconst) 6223 v.AuxInt = 0 6224 return true 6225 } 6226 // match: (LessThanU (FlagGT_ULT)) 6227 // result: (MOVDconst [1]) 6228 for { 6229 v_0 := v.Args[0] 6230 if v_0.Op != OpARM64FlagGT_ULT { 6231 break 6232 } 6233 v.reset(OpARM64MOVDconst) 6234 v.AuxInt = 1 6235 return true 6236 } 6237 // match: (LessThanU (FlagGT_UGT)) 6238 // result: (MOVDconst [0]) 6239 for { 6240 v_0 := v.Args[0] 6241 if v_0.Op != OpARM64FlagGT_UGT { 6242 break 6243 } 6244 v.reset(OpARM64MOVDconst) 6245 v.AuxInt = 0 6246 return true 6247 } 6248 // match: (LessThanU (InvertFlags x)) 6249 // result: (GreaterThanU x) 6250 for { 6251 v_0 := v.Args[0] 6252 if v_0.Op != OpARM64InvertFlags { 6253 break 6254 } 6255 x := v_0.Args[0] 6256 
v.reset(OpARM64GreaterThanU) 6257 v.AddArg(x) 6258 return true 6259 } 6260 return false 6261 } 6262 func rewriteValueARM64_OpARM64MADD_0(v *Value) bool { 6263 b := v.Block 6264 // match: (MADD a x (MOVDconst [-1])) 6265 // result: (SUB a x) 6266 for { 6267 _ = v.Args[2] 6268 a := v.Args[0] 6269 x := v.Args[1] 6270 v_2 := v.Args[2] 6271 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != -1 { 6272 break 6273 } 6274 v.reset(OpARM64SUB) 6275 v.AddArg(a) 6276 v.AddArg(x) 6277 return true 6278 } 6279 // match: (MADD a _ (MOVDconst [0])) 6280 // result: a 6281 for { 6282 _ = v.Args[2] 6283 a := v.Args[0] 6284 v_2 := v.Args[2] 6285 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 6286 break 6287 } 6288 v.reset(OpCopy) 6289 v.Type = a.Type 6290 v.AddArg(a) 6291 return true 6292 } 6293 // match: (MADD a x (MOVDconst [1])) 6294 // result: (ADD a x) 6295 for { 6296 _ = v.Args[2] 6297 a := v.Args[0] 6298 x := v.Args[1] 6299 v_2 := v.Args[2] 6300 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 1 { 6301 break 6302 } 6303 v.reset(OpARM64ADD) 6304 v.AddArg(a) 6305 v.AddArg(x) 6306 return true 6307 } 6308 // match: (MADD a x (MOVDconst [c])) 6309 // cond: isPowerOfTwo(c) 6310 // result: (ADDshiftLL a x [log2(c)]) 6311 for { 6312 _ = v.Args[2] 6313 a := v.Args[0] 6314 x := v.Args[1] 6315 v_2 := v.Args[2] 6316 if v_2.Op != OpARM64MOVDconst { 6317 break 6318 } 6319 c := v_2.AuxInt 6320 if !(isPowerOfTwo(c)) { 6321 break 6322 } 6323 v.reset(OpARM64ADDshiftLL) 6324 v.AuxInt = log2(c) 6325 v.AddArg(a) 6326 v.AddArg(x) 6327 return true 6328 } 6329 // match: (MADD a x (MOVDconst [c])) 6330 // cond: isPowerOfTwo(c-1) && c>=3 6331 // result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)])) 6332 for { 6333 _ = v.Args[2] 6334 a := v.Args[0] 6335 x := v.Args[1] 6336 v_2 := v.Args[2] 6337 if v_2.Op != OpARM64MOVDconst { 6338 break 6339 } 6340 c := v_2.AuxInt 6341 if !(isPowerOfTwo(c-1) && c >= 3) { 6342 break 6343 } 6344 v.reset(OpARM64ADD) 6345 v.AddArg(a) 6346 v0 := b.NewValue0(v.Pos, 
OpARM64ADDshiftLL, x.Type) 6347 v0.AuxInt = log2(c - 1) 6348 v0.AddArg(x) 6349 v0.AddArg(x) 6350 v.AddArg(v0) 6351 return true 6352 } 6353 // match: (MADD a x (MOVDconst [c])) 6354 // cond: isPowerOfTwo(c+1) && c>=7 6355 // result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)])) 6356 for { 6357 _ = v.Args[2] 6358 a := v.Args[0] 6359 x := v.Args[1] 6360 v_2 := v.Args[2] 6361 if v_2.Op != OpARM64MOVDconst { 6362 break 6363 } 6364 c := v_2.AuxInt 6365 if !(isPowerOfTwo(c+1) && c >= 7) { 6366 break 6367 } 6368 v.reset(OpARM64SUB) 6369 v.AddArg(a) 6370 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6371 v0.AuxInt = log2(c + 1) 6372 v0.AddArg(x) 6373 v0.AddArg(x) 6374 v.AddArg(v0) 6375 return true 6376 } 6377 // match: (MADD a x (MOVDconst [c])) 6378 // cond: c%3 == 0 && isPowerOfTwo(c/3) 6379 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 6380 for { 6381 _ = v.Args[2] 6382 a := v.Args[0] 6383 x := v.Args[1] 6384 v_2 := v.Args[2] 6385 if v_2.Op != OpARM64MOVDconst { 6386 break 6387 } 6388 c := v_2.AuxInt 6389 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 6390 break 6391 } 6392 v.reset(OpARM64SUBshiftLL) 6393 v.AuxInt = log2(c / 3) 6394 v.AddArg(a) 6395 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6396 v0.AuxInt = 2 6397 v0.AddArg(x) 6398 v0.AddArg(x) 6399 v.AddArg(v0) 6400 return true 6401 } 6402 // match: (MADD a x (MOVDconst [c])) 6403 // cond: c%5 == 0 && isPowerOfTwo(c/5) 6404 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 6405 for { 6406 _ = v.Args[2] 6407 a := v.Args[0] 6408 x := v.Args[1] 6409 v_2 := v.Args[2] 6410 if v_2.Op != OpARM64MOVDconst { 6411 break 6412 } 6413 c := v_2.AuxInt 6414 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 6415 break 6416 } 6417 v.reset(OpARM64ADDshiftLL) 6418 v.AuxInt = log2(c / 5) 6419 v.AddArg(a) 6420 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6421 v0.AuxInt = 2 6422 v0.AddArg(x) 6423 v0.AddArg(x) 6424 v.AddArg(v0) 6425 return true 6426 } 6427 // match: (MADD a x (MOVDconst [c])) 6428 // 
// rewriteValueARM64_OpARM64MADD_10 applies the second batch of MADD rewrite
// rules, all of the form (MADD a (MOVDconst [c]) x) with the multiplier
// constant in the second argument slot. Each rule strength-reduces the
// multiply-add into shifted add/sub operations. Rules are tried in order;
// the first match rewrites v in place and returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MADD_10(v *Value) bool {
	b := v.Block
	// match: (MADD a (MOVDconst [-1]) x)
	// result: (SUB a x)
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != -1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// result: (ADD a x)
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
OpARM64ADDshiftLL, x.Type) 6679 v0.AuxInt = 3 6680 v0.AddArg(x) 6681 v0.AddArg(x) 6682 v.AddArg(v0) 6683 return true 6684 } 6685 return false 6686 } 6687 func rewriteValueARM64_OpARM64MADD_20(v *Value) bool { 6688 b := v.Block 6689 // match: (MADD (MOVDconst [c]) x y) 6690 // result: (ADDconst [c] (MUL <x.Type> x y)) 6691 for { 6692 y := v.Args[2] 6693 v_0 := v.Args[0] 6694 if v_0.Op != OpARM64MOVDconst { 6695 break 6696 } 6697 c := v_0.AuxInt 6698 x := v.Args[1] 6699 v.reset(OpARM64ADDconst) 6700 v.AuxInt = c 6701 v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 6702 v0.AddArg(x) 6703 v0.AddArg(y) 6704 v.AddArg(v0) 6705 return true 6706 } 6707 // match: (MADD a (MOVDconst [c]) (MOVDconst [d])) 6708 // result: (ADDconst [c*d] a) 6709 for { 6710 _ = v.Args[2] 6711 a := v.Args[0] 6712 v_1 := v.Args[1] 6713 if v_1.Op != OpARM64MOVDconst { 6714 break 6715 } 6716 c := v_1.AuxInt 6717 v_2 := v.Args[2] 6718 if v_2.Op != OpARM64MOVDconst { 6719 break 6720 } 6721 d := v_2.AuxInt 6722 v.reset(OpARM64ADDconst) 6723 v.AuxInt = c * d 6724 v.AddArg(a) 6725 return true 6726 } 6727 return false 6728 } 6729 func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool { 6730 b := v.Block 6731 // match: (MADDW a x (MOVDconst [c])) 6732 // cond: int32(c)==-1 6733 // result: (SUB a x) 6734 for { 6735 _ = v.Args[2] 6736 a := v.Args[0] 6737 x := v.Args[1] 6738 v_2 := v.Args[2] 6739 if v_2.Op != OpARM64MOVDconst { 6740 break 6741 } 6742 c := v_2.AuxInt 6743 if !(int32(c) == -1) { 6744 break 6745 } 6746 v.reset(OpARM64SUB) 6747 v.AddArg(a) 6748 v.AddArg(x) 6749 return true 6750 } 6751 // match: (MADDW a _ (MOVDconst [c])) 6752 // cond: int32(c)==0 6753 // result: a 6754 for { 6755 _ = v.Args[2] 6756 a := v.Args[0] 6757 v_2 := v.Args[2] 6758 if v_2.Op != OpARM64MOVDconst { 6759 break 6760 } 6761 c := v_2.AuxInt 6762 if !(int32(c) == 0) { 6763 break 6764 } 6765 v.reset(OpCopy) 6766 v.Type = a.Type 6767 v.AddArg(a) 6768 return true 6769 } 6770 // match: (MADDW a x (MOVDconst [c])) 6771 // cond: 
// rewriteValueARM64_OpARM64MADDW_0 applies the first batch of MADDW (32-bit
// multiply-add) rewrite rules, all of the form (MADDW a x (MOVDconst [c]))
// with the multiplier constant in the third argument slot. Conditions use
// int32 truncation of c since MADDW multiplies 32-bit quantities. Rules are
// tried in order; the first match rewrites v in place and returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool {
	b := v.Block
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW_10 applies the second batch of MADDW
// rewrite rules: the same strength reductions as the first batch, but with
// the multiplier constant in the second argument slot,
// (MADDW a (MOVDconst [c]) x). Rules are tried in order; the first match
// rewrites v in place and returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool {
	b := v.Block
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		x := v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
[c]) x y) 7187 // result: (ADDconst [c] (MULW <x.Type> x y)) 7188 for { 7189 y := v.Args[2] 7190 v_0 := v.Args[0] 7191 if v_0.Op != OpARM64MOVDconst { 7192 break 7193 } 7194 c := v_0.AuxInt 7195 x := v.Args[1] 7196 v.reset(OpARM64ADDconst) 7197 v.AuxInt = c 7198 v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 7199 v0.AddArg(x) 7200 v0.AddArg(y) 7201 v.AddArg(v0) 7202 return true 7203 } 7204 // match: (MADDW a (MOVDconst [c]) (MOVDconst [d])) 7205 // result: (ADDconst [int64(int32(c)*int32(d))] a) 7206 for { 7207 _ = v.Args[2] 7208 a := v.Args[0] 7209 v_1 := v.Args[1] 7210 if v_1.Op != OpARM64MOVDconst { 7211 break 7212 } 7213 c := v_1.AuxInt 7214 v_2 := v.Args[2] 7215 if v_2.Op != OpARM64MOVDconst { 7216 break 7217 } 7218 d := v_2.AuxInt 7219 v.reset(OpARM64ADDconst) 7220 v.AuxInt = int64(int32(c) * int32(d)) 7221 v.AddArg(a) 7222 return true 7223 } 7224 return false 7225 } 7226 func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool { 7227 b := v.Block 7228 // match: (MNEG x (MOVDconst [-1])) 7229 // result: x 7230 for { 7231 _ = v.Args[1] 7232 x := v.Args[0] 7233 v_1 := v.Args[1] 7234 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != -1 { 7235 break 7236 } 7237 v.reset(OpCopy) 7238 v.Type = x.Type 7239 v.AddArg(x) 7240 return true 7241 } 7242 // match: (MNEG (MOVDconst [-1]) x) 7243 // result: x 7244 for { 7245 x := v.Args[1] 7246 v_0 := v.Args[0] 7247 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != -1 { 7248 break 7249 } 7250 v.reset(OpCopy) 7251 v.Type = x.Type 7252 v.AddArg(x) 7253 return true 7254 } 7255 // match: (MNEG _ (MOVDconst [0])) 7256 // result: (MOVDconst [0]) 7257 for { 7258 _ = v.Args[1] 7259 v_1 := v.Args[1] 7260 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 7261 break 7262 } 7263 v.reset(OpARM64MOVDconst) 7264 v.AuxInt = 0 7265 return true 7266 } 7267 // match: (MNEG (MOVDconst [0]) _) 7268 // result: (MOVDconst [0]) 7269 for { 7270 _ = v.Args[1] 7271 v_0 := v.Args[0] 7272 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 0 { 7273 break 7274 } 7275 
// rewriteValueARM64_OpARM64MNEG_0 applies the first batch of MNEG
// (negated-multiply) rewrite rules for small constant multipliers
// (-1, 0, 1, powers of two, and 2^n+1). Each rule appears twice, once per
// operand order, since MNEG is commutative. Rules are tried in order; the
// first match rewrites v in place and returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
	b := v.Block
	// match: (MNEG x (MOVDconst [-1]))
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [-1]) x)
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG _ (MOVDconst [0]))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG (MOVDconst [0]) _)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG x (MOVDconst [1]))
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [1]) x)
	// result: (NEG x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MNEG_10 applies the second batch of MNEG rewrite
// rules, strength-reducing negated multiplies by constants of the form
// 2^n-1, 3*2^n, 5*2^n, 7*2^n and 9*2^n. Each rule appears twice, once per
// operand order, since MNEG is commutative. Rules are tried in order; the
// first match rewrites v in place and returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
	b := v.Block
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
v.reset(OpARM64NEG) 7597 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7598 v0.AuxInt = log2(c / 9) 7599 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7600 v1.AuxInt = 3 7601 v1.AddArg(x) 7602 v1.AddArg(x) 7603 v0.AddArg(v1) 7604 v.AddArg(v0) 7605 return true 7606 } 7607 // match: (MNEG (MOVDconst [c]) x) 7608 // cond: c%9 == 0 && isPowerOfTwo(c/9) 7609 // result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))) 7610 for { 7611 x := v.Args[1] 7612 v_0 := v.Args[0] 7613 if v_0.Op != OpARM64MOVDconst { 7614 break 7615 } 7616 c := v_0.AuxInt 7617 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 7618 break 7619 } 7620 v.reset(OpARM64NEG) 7621 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7622 v0.AuxInt = log2(c / 9) 7623 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7624 v1.AuxInt = 3 7625 v1.AddArg(x) 7626 v1.AddArg(x) 7627 v0.AddArg(v1) 7628 v.AddArg(v0) 7629 return true 7630 } 7631 return false 7632 } 7633 func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool { 7634 // match: (MNEG (MOVDconst [c]) (MOVDconst [d])) 7635 // result: (MOVDconst [-c*d]) 7636 for { 7637 _ = v.Args[1] 7638 v_0 := v.Args[0] 7639 if v_0.Op != OpARM64MOVDconst { 7640 break 7641 } 7642 c := v_0.AuxInt 7643 v_1 := v.Args[1] 7644 if v_1.Op != OpARM64MOVDconst { 7645 break 7646 } 7647 d := v_1.AuxInt 7648 v.reset(OpARM64MOVDconst) 7649 v.AuxInt = -c * d 7650 return true 7651 } 7652 // match: (MNEG (MOVDconst [d]) (MOVDconst [c])) 7653 // result: (MOVDconst [-c*d]) 7654 for { 7655 _ = v.Args[1] 7656 v_0 := v.Args[0] 7657 if v_0.Op != OpARM64MOVDconst { 7658 break 7659 } 7660 d := v_0.AuxInt 7661 v_1 := v.Args[1] 7662 if v_1.Op != OpARM64MOVDconst { 7663 break 7664 } 7665 c := v_1.AuxInt 7666 v.reset(OpARM64MOVDconst) 7667 v.AuxInt = -c * d 7668 return true 7669 } 7670 return false 7671 } 7672 func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool { 7673 b := v.Block 7674 // match: (MNEGW x (MOVDconst [c])) 7675 // cond: int32(c)==-1 7676 // result: x 7677 for { 7678 _ = 
// rewriteValueARM64_OpARM64MNEGW_0 applies the first batch of MNEGW (32-bit
// negated-multiply) rewrite rules for small constant multipliers. Conditions
// use int32 truncation of c since MNEGW multiplies 32-bit quantities. Each
// rule appears twice, once per operand order, since MNEGW is commutative.
// Rules are tried in order; the first match rewrites v in place and
// returns true.
// Code generated from gen/ARM64.rules; edit the rules file, not this code.
func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
	b := v.Block
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEGW_10 holds the second batch of rewrite rules for
// MNEGW (negated 32-bit multiply; see the constant-folding rule in
// rewriteValueARM64_OpARM64MNEGW_20, which computes -int64(int32(c)*int32(d))).
// Each rule strength-reduces a multiply by a constant of the form 2^n-1,
// 3*2^n, 5*2^n, 7*2^n or 9*2^n into shift/add/sub sequences, with a
// commuted (constant-first) variant of every pattern. Rules are tried in
// order; the first match rewrites v in place and returns true.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
	b := v.Block
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MNEGW_20 constant-folds MNEGW when both operands
// are MOVDconst: the result is the negated product of the operands' low
// 32 bits, sign-extended to 64 bits. Both argument orders are matched.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool {
	// match: (MNEGW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [-int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(int32(c) * int32(d))
		return true
	}
	// match: (MNEGW (MOVDconst [d]) (MOVDconst [c]))
	// result: (MOVDconst [-int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(int32(c) * int32(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOD_0 constant-folds the 64-bit modulo when both
// operands are constants. Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [c%d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c % d
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MODW_0 constant-folds the 32-bit modulo: the
// operands' low 32 bits are taken, the remainder computed, and the result
// sign-extended to 64 bits. Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [int64(int32(c)%int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) % int32(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBUload_0 rewrites MOVBUload (unsigned byte
// load): folds ADDconst/MOVDaddr address arithmetic into the load's
// offset/symbol, converts an (ADD ptr idx) address into the indexed load
// form, replaces a load that reads back a just-zeroed byte with constant 0,
// and folds a load from a read-only symbol into its known byte value.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBUloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read8(sym, off))])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSB || !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(read8(sym, off))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBUloadidx_0 rewrites the indexed unsigned byte
// load: a constant index (in either operand position) is folded back into an
// offset-form MOVBUload, and a load that reads back a just-zeroed byte (same
// ptr/idx pair, in either order) becomes constant 0.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBUloadidx_0(v *Value) bool {
	// match: (MOVBUloadidx ptr (MOVDconst [c]) mem)
	// result: (MOVBUload [c] ptr mem)
	for {
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVBUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUloadidx (MOVDconst [c]) ptr mem)
	// result: (MOVBUload [c] ptr mem)
	for {
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		v.reset(OpARM64MOVBUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBUreg_0 rewrites MOVBUreg (zero-extend the low
// byte): it drops the extension when the operand is already zero-extended
// (an unsigned byte load, indexed load, or another MOVBUreg, rewritten to a
// plain MOVDreg move), narrows an ANDconst mask to 8 bits, folds a constant
// operand to int64(uint8(c)), treats boolean-typed values as already
// extended, and combines with SLLconst/SRLconst into UBFIZ/UBFX bitfield
// extracts when the shift amounts permit.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<8 - 1)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	// match: (MOVBUreg x)
	// cond: x.Type.IsBoolean()
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if !(x.Type.IsBoolean()) {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<8-1, sc)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, sc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<8-1, 0)
	// result: (UBFX [armBFAuxInt(sc, 8)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, 8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBload_0 rewrites MOVBload (signed byte load);
// the rules mirror MOVBUload_0: fold ADDconst/MOVDaddr address arithmetic
// into the offset/symbol, convert an (ADD ptr idx) address into the indexed
// form, and replace a load that reads back a just-zeroed byte with constant 0.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 :=
			v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBloadidx_0 rewrites the indexed signed byte
// load; the rules mirror MOVBUloadidx_0: fold a constant index (either
// operand position) into an offset-form MOVBload, and replace a load that
// reads back a just-zeroed byte (same ptr/idx pair, in either order) with
// constant 0. Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBloadidx_0(v *Value) bool {
	// match: (MOVBloadidx ptr (MOVDconst [c]) mem)
	// result: (MOVBload [c] ptr mem)
	for {
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVBload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBloadidx (MOVDconst [c]) ptr mem)
	// result: (MOVBload [c] ptr mem)
	for {
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		v.reset(OpARM64MOVBload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBreg_0 rewrites MOVBreg (sign-extend the low
// byte): the extension is dropped (rewritten to MOVDreg) when the operand is
// a signed byte load, indexed byte load, or another MOVBreg; a constant
// operand folds to int64(int8(c)); and MOVBreg of SLLconst combines into an
// SBFIZ signed bitfield insert when the shift is below 8.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (MOVBreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (SBFIZ [armBFAuxInt(lc, 8-lc)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = armBFAuxInt(lc, 8-lc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBstore_0 holds the first batch of rewrite rules
// for MOVBstore (byte store): fold ADDconst/MOVDaddr address arithmetic into
// the offset/symbol, convert an (ADD ptr idx) address into the indexed store
// form, turn a store of constant 0 into MOVBstorezero, and strip redundant
// MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/MOVWreg/MOVWUreg extensions of the stored
// value (a byte store only keeps the low 8 bits anyway).
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx := v_0.Args[1]
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBstore_10 holds the second batch of MOVBstore
// rules: store-combining. Each rule matches a byte store of the next-higher
// byte of a value w (extracted via SRLconst or UBFX) whose chained memory
// argument is a single-use byte store of w itself one byte lower at the same
// address (offset form or (ADD ptr idx) / MOVBstoreidx form), and merges the
// pair into one MOVHstore/MOVHstoreidx halfword store. clobber(x) in the
// conditions marks the inner store as dead once merged.
// Generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpARM64MOVBstore_10(v *Value) bool {
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 8 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 8 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(8, 8) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(8, 8) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(8, 24) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(8, 24) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] ||
			!(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst || w0.AuxInt != j-8 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst || w0.AuxInt != j-8 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBstore_20 holds the third batch of MOVBstore
// store-combining rules; like MOVBstore_10 they merge two adjacent byte
// stores of the same value into one halfword store, here for UBFX bitfield
// extracts whose lsb values differ by 8 (checked via getARM64BFlsb /
// getARM64BFwidth) and for SRLconst-of-MOVDreg shift pairs.
// Generated from gen/ARM64.rules; do not edit by hand.
// NOTE(review): this function is truncated at the end of the visible chunk;
// the remainder lies outside this excerpt.
func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool {
	b := v.Block
	// match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		bfc := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
		mem := x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64UBFX {
			break
		}
		bfc2 := w0.AuxInt
		if w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		idx0 := v_0.Args[1]
		ptr0 := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		bfc := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		mem := x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64UBFX {
			break
		}
		bfc2 := w0.AuxInt
		if w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s {
			break
		}
mem := x.Args[2] 9363 ptr1 := x.Args[0] 9364 w0 := x.Args[1] 9365 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-8 { 9366 break 9367 } 9368 w0_0 := w0.Args[0] 9369 if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9370 break 9371 } 9372 v.reset(OpARM64MOVHstore) 9373 v.AuxInt = i - 1 9374 v.Aux = s 9375 v.AddArg(ptr0) 9376 v.AddArg(w0) 9377 v.AddArg(mem) 9378 return true 9379 } 9380 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem)) 9381 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9382 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 9383 for { 9384 if v.AuxInt != 1 { 9385 break 9386 } 9387 s := v.Aux 9388 _ = v.Args[2] 9389 v_0 := v.Args[0] 9390 if v_0.Op != OpARM64ADD { 9391 break 9392 } 9393 idx0 := v_0.Args[1] 9394 ptr0 := v_0.Args[0] 9395 v_1 := v.Args[1] 9396 if v_1.Op != OpARM64SRLconst { 9397 break 9398 } 9399 j := v_1.AuxInt 9400 v_1_0 := v_1.Args[0] 9401 if v_1_0.Op != OpARM64MOVDreg { 9402 break 9403 } 9404 w := v_1_0.Args[0] 9405 x := v.Args[2] 9406 if x.Op != OpARM64MOVBstoreidx { 9407 break 9408 } 9409 mem := x.Args[3] 9410 ptr1 := x.Args[0] 9411 idx1 := x.Args[1] 9412 w0 := x.Args[2] 9413 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-8 { 9414 break 9415 } 9416 w0_0 := w0.Args[0] 9417 if w0_0.Op != OpARM64MOVDreg || w != w0_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9418 break 9419 } 9420 v.reset(OpARM64MOVHstoreidx) 9421 v.AddArg(ptr1) 9422 v.AddArg(idx1) 9423 v.AddArg(w0) 9424 v.AddArg(mem) 9425 return true 9426 } 9427 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) 
x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem)))))))) 9428 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 9429 // result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem) 9430 for { 9431 i := v.AuxInt 9432 s := v.Aux 9433 _ = v.Args[2] 9434 ptr := v.Args[0] 9435 w := v.Args[1] 9436 x0 := v.Args[2] 9437 if x0.Op != OpARM64MOVBstore || x0.AuxInt != i-1 || x0.Aux != s { 9438 break 9439 } 9440 _ = x0.Args[2] 9441 if ptr != x0.Args[0] { 9442 break 9443 } 9444 x0_1 := x0.Args[1] 9445 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 || w != x0_1.Args[0] { 9446 break 9447 } 9448 x1 := x0.Args[2] 9449 if x1.Op != OpARM64MOVBstore || x1.AuxInt != i-2 || x1.Aux != s { 9450 break 9451 } 9452 _ = x1.Args[2] 9453 if ptr != x1.Args[0] { 9454 break 9455 } 9456 x1_1 := x1.Args[1] 9457 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 || w != x1_1.Args[0] { 9458 break 9459 } 9460 x2 := x1.Args[2] 9461 if x2.Op != OpARM64MOVBstore || x2.AuxInt != i-3 || x2.Aux != s { 9462 break 9463 } 9464 _ = x2.Args[2] 9465 if ptr != x2.Args[0] { 9466 break 9467 } 9468 x2_1 := x2.Args[1] 9469 if x2_1.Op != OpARM64SRLconst || x2_1.AuxInt != 24 || w != x2_1.Args[0] { 9470 break 9471 } 9472 x3 := x2.Args[2] 9473 if x3.Op != OpARM64MOVBstore || x3.AuxInt != i-4 || x3.Aux != s { 9474 break 9475 } 9476 _ = x3.Args[2] 9477 if ptr != x3.Args[0] { 9478 break 9479 } 9480 x3_1 := x3.Args[1] 9481 if x3_1.Op != OpARM64SRLconst || x3_1.AuxInt != 32 || w != x3_1.Args[0] { 9482 break 9483 } 9484 x4 := x3.Args[2] 9485 if x4.Op != OpARM64MOVBstore || x4.AuxInt != i-5 || x4.Aux != s { 9486 break 9487 } 9488 _ = x4.Args[2] 9489 if ptr != x4.Args[0] { 9490 break 9491 } 9492 x4_1 := x4.Args[1] 9493 if 
x4_1.Op != OpARM64SRLconst || x4_1.AuxInt != 40 || w != x4_1.Args[0] { 9494 break 9495 } 9496 x5 := x4.Args[2] 9497 if x5.Op != OpARM64MOVBstore || x5.AuxInt != i-6 || x5.Aux != s { 9498 break 9499 } 9500 _ = x5.Args[2] 9501 if ptr != x5.Args[0] { 9502 break 9503 } 9504 x5_1 := x5.Args[1] 9505 if x5_1.Op != OpARM64SRLconst || x5_1.AuxInt != 48 || w != x5_1.Args[0] { 9506 break 9507 } 9508 x6 := x5.Args[2] 9509 if x6.Op != OpARM64MOVBstore || x6.AuxInt != i-7 || x6.Aux != s { 9510 break 9511 } 9512 mem := x6.Args[2] 9513 if ptr != x6.Args[0] { 9514 break 9515 } 9516 x6_1 := x6.Args[1] 9517 if x6_1.Op != OpARM64SRLconst || x6_1.AuxInt != 56 || w != x6_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 9518 break 9519 } 9520 v.reset(OpARM64MOVDstore) 9521 v.AuxInt = i - 7 9522 v.Aux = s 9523 v.AddArg(ptr) 9524 v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type) 9525 v0.AddArg(w) 9526 v.AddArg(v0) 9527 v.AddArg(mem) 9528 return true 9529 } 9530 // match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem)))))))) 9531 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 9532 // result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem) 9533 for { 9534 if v.AuxInt != 7 { 9535 break 9536 } 9537 s := v.Aux 9538 _ = v.Args[2] 
9539 p := v.Args[0] 9540 w := v.Args[1] 9541 x0 := v.Args[2] 9542 if x0.Op != OpARM64MOVBstore || x0.AuxInt != 6 || x0.Aux != s { 9543 break 9544 } 9545 _ = x0.Args[2] 9546 if p != x0.Args[0] { 9547 break 9548 } 9549 x0_1 := x0.Args[1] 9550 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 || w != x0_1.Args[0] { 9551 break 9552 } 9553 x1 := x0.Args[2] 9554 if x1.Op != OpARM64MOVBstore || x1.AuxInt != 5 || x1.Aux != s { 9555 break 9556 } 9557 _ = x1.Args[2] 9558 if p != x1.Args[0] { 9559 break 9560 } 9561 x1_1 := x1.Args[1] 9562 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 || w != x1_1.Args[0] { 9563 break 9564 } 9565 x2 := x1.Args[2] 9566 if x2.Op != OpARM64MOVBstore || x2.AuxInt != 4 || x2.Aux != s { 9567 break 9568 } 9569 _ = x2.Args[2] 9570 if p != x2.Args[0] { 9571 break 9572 } 9573 x2_1 := x2.Args[1] 9574 if x2_1.Op != OpARM64SRLconst || x2_1.AuxInt != 24 || w != x2_1.Args[0] { 9575 break 9576 } 9577 x3 := x2.Args[2] 9578 if x3.Op != OpARM64MOVBstore || x3.AuxInt != 3 || x3.Aux != s { 9579 break 9580 } 9581 _ = x3.Args[2] 9582 if p != x3.Args[0] { 9583 break 9584 } 9585 x3_1 := x3.Args[1] 9586 if x3_1.Op != OpARM64SRLconst || x3_1.AuxInt != 32 || w != x3_1.Args[0] { 9587 break 9588 } 9589 x4 := x3.Args[2] 9590 if x4.Op != OpARM64MOVBstore || x4.AuxInt != 2 || x4.Aux != s { 9591 break 9592 } 9593 _ = x4.Args[2] 9594 if p != x4.Args[0] { 9595 break 9596 } 9597 x4_1 := x4.Args[1] 9598 if x4_1.Op != OpARM64SRLconst || x4_1.AuxInt != 40 || w != x4_1.Args[0] { 9599 break 9600 } 9601 x5 := x4.Args[2] 9602 if x5.Op != OpARM64MOVBstore || x5.AuxInt != 1 || x5.Aux != s { 9603 break 9604 } 9605 _ = x5.Args[2] 9606 p1 := x5.Args[0] 9607 if p1.Op != OpARM64ADD { 9608 break 9609 } 9610 idx1 := p1.Args[1] 9611 ptr1 := p1.Args[0] 9612 x5_1 := x5.Args[1] 9613 if x5_1.Op != OpARM64SRLconst || x5_1.AuxInt != 48 || w != x5_1.Args[0] { 9614 break 9615 } 9616 x6 := x5.Args[2] 9617 if x6.Op != OpARM64MOVBstoreidx { 9618 break 9619 } 9620 mem := x6.Args[3] 9621 ptr0 := 
x6.Args[0] 9622 idx0 := x6.Args[1] 9623 x6_2 := x6.Args[2] 9624 if x6_2.Op != OpARM64SRLconst || x6_2.AuxInt != 56 || w != x6_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 9625 break 9626 } 9627 v.reset(OpARM64MOVDstoreidx) 9628 v.AddArg(ptr0) 9629 v.AddArg(idx0) 9630 v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type) 9631 v0.AddArg(w) 9632 v.AddArg(v0) 9633 v.AddArg(mem) 9634 return true 9635 } 9636 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [armBFAuxInt(24, 8)] w) mem)))) 9637 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 9638 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 9639 for { 9640 i := v.AuxInt 9641 s := v.Aux 9642 _ = v.Args[2] 9643 ptr := v.Args[0] 9644 w := v.Args[1] 9645 x0 := v.Args[2] 9646 if x0.Op != OpARM64MOVBstore || x0.AuxInt != i-1 || x0.Aux != s { 9647 break 9648 } 9649 _ = x0.Args[2] 9650 if ptr != x0.Args[0] { 9651 break 9652 } 9653 x0_1 := x0.Args[1] 9654 if x0_1.Op != OpARM64UBFX || x0_1.AuxInt != armBFAuxInt(8, 24) || w != x0_1.Args[0] { 9655 break 9656 } 9657 x1 := x0.Args[2] 9658 if x1.Op != OpARM64MOVBstore || x1.AuxInt != i-2 || x1.Aux != s { 9659 break 9660 } 9661 _ = x1.Args[2] 9662 if ptr != x1.Args[0] { 9663 break 9664 } 9665 x1_1 := x1.Args[1] 9666 if x1_1.Op != OpARM64UBFX || x1_1.AuxInt != armBFAuxInt(16, 16) || w != x1_1.Args[0] { 9667 break 9668 } 9669 x2 := x1.Args[2] 9670 if x2.Op != OpARM64MOVBstore || x2.AuxInt != i-3 || x2.Aux != s { 9671 break 9672 } 9673 mem := x2.Args[2] 9674 if ptr != x2.Args[0] { 9675 
break 9676 } 9677 x2_1 := x2.Args[1] 9678 if x2_1.Op != OpARM64UBFX || x2_1.AuxInt != armBFAuxInt(24, 8) || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 9679 break 9680 } 9681 v.reset(OpARM64MOVWstore) 9682 v.AuxInt = i - 3 9683 v.Aux = s 9684 v.AddArg(ptr) 9685 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 9686 v0.AddArg(w) 9687 v.AddArg(v0) 9688 v.AddArg(mem) 9689 return true 9690 } 9691 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(24, 8)] w) mem)))) 9692 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 9693 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 9694 for { 9695 if v.AuxInt != 3 { 9696 break 9697 } 9698 s := v.Aux 9699 _ = v.Args[2] 9700 p := v.Args[0] 9701 w := v.Args[1] 9702 x0 := v.Args[2] 9703 if x0.Op != OpARM64MOVBstore || x0.AuxInt != 2 || x0.Aux != s { 9704 break 9705 } 9706 _ = x0.Args[2] 9707 if p != x0.Args[0] { 9708 break 9709 } 9710 x0_1 := x0.Args[1] 9711 if x0_1.Op != OpARM64UBFX || x0_1.AuxInt != armBFAuxInt(8, 24) || w != x0_1.Args[0] { 9712 break 9713 } 9714 x1 := x0.Args[2] 9715 if x1.Op != OpARM64MOVBstore || x1.AuxInt != 1 || x1.Aux != s { 9716 break 9717 } 9718 _ = x1.Args[2] 9719 p1 := x1.Args[0] 9720 if p1.Op != OpARM64ADD { 9721 break 9722 } 9723 idx1 := p1.Args[1] 9724 ptr1 := p1.Args[0] 9725 x1_1 := x1.Args[1] 9726 if x1_1.Op != OpARM64UBFX || x1_1.AuxInt != armBFAuxInt(16, 16) || w != x1_1.Args[0] { 9727 break 9728 } 9729 x2 := x1.Args[2] 9730 if x2.Op != OpARM64MOVBstoreidx { 9731 break 9732 } 9733 mem := x2.Args[3] 9734 ptr0 := x2.Args[0] 9735 idx0 := x2.Args[1] 9736 x2_2 := x2.Args[2] 9737 if x2_2.Op != 
OpARM64UBFX || x2_2.AuxInt != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 9738 break 9739 } 9740 v.reset(OpARM64MOVWstoreidx) 9741 v.AddArg(ptr0) 9742 v.AddArg(idx0) 9743 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 9744 v0.AddArg(w) 9745 v.AddArg(v0) 9746 v.AddArg(mem) 9747 return true 9748 } 9749 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem)))) 9750 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 9751 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 9752 for { 9753 i := v.AuxInt 9754 s := v.Aux 9755 _ = v.Args[2] 9756 ptr := v.Args[0] 9757 w := v.Args[1] 9758 x0 := v.Args[2] 9759 if x0.Op != OpARM64MOVBstore || x0.AuxInt != i-1 || x0.Aux != s { 9760 break 9761 } 9762 _ = x0.Args[2] 9763 if ptr != x0.Args[0] { 9764 break 9765 } 9766 x0_1 := x0.Args[1] 9767 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 { 9768 break 9769 } 9770 x0_1_0 := x0_1.Args[0] 9771 if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] { 9772 break 9773 } 9774 x1 := x0.Args[2] 9775 if x1.Op != OpARM64MOVBstore || x1.AuxInt != i-2 || x1.Aux != s { 9776 break 9777 } 9778 _ = x1.Args[2] 9779 if ptr != x1.Args[0] { 9780 break 9781 } 9782 x1_1 := x1.Args[1] 9783 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 { 9784 break 9785 } 9786 x1_1_0 := x1_1.Args[0] 9787 if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] { 9788 break 9789 } 9790 x2 := x1.Args[2] 9791 if x2.Op != OpARM64MOVBstore || x2.AuxInt != i-3 || x2.Aux != s { 9792 break 9793 } 9794 mem := x2.Args[2] 9795 if ptr != x2.Args[0] { 9796 break 9797 } 9798 x2_1 := x2.Args[1] 9799 if 
x2_1.Op != OpARM64SRLconst || x2_1.AuxInt != 24 { 9800 break 9801 } 9802 x2_1_0 := x2_1.Args[0] 9803 if x2_1_0.Op != OpARM64MOVDreg || w != x2_1_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 9804 break 9805 } 9806 v.reset(OpARM64MOVWstore) 9807 v.AuxInt = i - 3 9808 v.Aux = s 9809 v.AddArg(ptr) 9810 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 9811 v0.AddArg(w) 9812 v.AddArg(v0) 9813 v.AddArg(mem) 9814 return true 9815 } 9816 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem)))) 9817 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 9818 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 9819 for { 9820 if v.AuxInt != 3 { 9821 break 9822 } 9823 s := v.Aux 9824 _ = v.Args[2] 9825 p := v.Args[0] 9826 w := v.Args[1] 9827 x0 := v.Args[2] 9828 if x0.Op != OpARM64MOVBstore || x0.AuxInt != 2 || x0.Aux != s { 9829 break 9830 } 9831 _ = x0.Args[2] 9832 if p != x0.Args[0] { 9833 break 9834 } 9835 x0_1 := x0.Args[1] 9836 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 { 9837 break 9838 } 9839 x0_1_0 := x0_1.Args[0] 9840 if x0_1_0.Op != OpARM64MOVDreg || w != x0_1_0.Args[0] { 9841 break 9842 } 9843 x1 := x0.Args[2] 9844 if x1.Op != OpARM64MOVBstore || x1.AuxInt != 1 || x1.Aux != s { 9845 break 9846 } 9847 _ = x1.Args[2] 9848 p1 := x1.Args[0] 9849 if p1.Op != OpARM64ADD { 9850 break 9851 } 9852 idx1 := p1.Args[1] 9853 ptr1 := p1.Args[0] 9854 x1_1 := x1.Args[1] 9855 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 { 9856 break 9857 } 9858 x1_1_0 := x1_1.Args[0] 9859 if x1_1_0.Op != OpARM64MOVDreg || w != x1_1_0.Args[0] { 9860 break 9861 } 9862 x2 := x1.Args[2] 9863 
if x2.Op != OpARM64MOVBstoreidx { 9864 break 9865 } 9866 mem := x2.Args[3] 9867 ptr0 := x2.Args[0] 9868 idx0 := x2.Args[1] 9869 x2_2 := x2.Args[2] 9870 if x2_2.Op != OpARM64SRLconst || x2_2.AuxInt != 24 { 9871 break 9872 } 9873 x2_2_0 := x2_2.Args[0] 9874 if x2_2_0.Op != OpARM64MOVDreg || w != x2_2_0.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 9875 break 9876 } 9877 v.reset(OpARM64MOVWstoreidx) 9878 v.AddArg(ptr0) 9879 v.AddArg(idx0) 9880 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 9881 v0.AddArg(w) 9882 v.AddArg(v0) 9883 v.AddArg(mem) 9884 return true 9885 } 9886 return false 9887 } 9888 func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool { 9889 b := v.Block 9890 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem)))) 9891 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 9892 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 9893 for { 9894 i := v.AuxInt 9895 s := v.Aux 9896 _ = v.Args[2] 9897 ptr := v.Args[0] 9898 w := v.Args[1] 9899 x0 := v.Args[2] 9900 if x0.Op != OpARM64MOVBstore || x0.AuxInt != i-1 || x0.Aux != s { 9901 break 9902 } 9903 _ = x0.Args[2] 9904 if ptr != x0.Args[0] { 9905 break 9906 } 9907 x0_1 := x0.Args[1] 9908 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 || w != x0_1.Args[0] { 9909 break 9910 } 9911 x1 := x0.Args[2] 9912 if x1.Op != OpARM64MOVBstore || x1.AuxInt != i-2 || x1.Aux != s { 9913 break 9914 } 9915 _ = x1.Args[2] 9916 if ptr != x1.Args[0] { 9917 break 9918 } 9919 x1_1 := x1.Args[1] 9920 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 || w != x1_1.Args[0] { 9921 break 9922 } 9923 x2 := x1.Args[2] 9924 if x2.Op != OpARM64MOVBstore || x2.AuxInt != 
i-3 || x2.Aux != s { 9925 break 9926 } 9927 mem := x2.Args[2] 9928 if ptr != x2.Args[0] { 9929 break 9930 } 9931 x2_1 := x2.Args[1] 9932 if x2_1.Op != OpARM64SRLconst || x2_1.AuxInt != 24 || w != x2_1.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 9933 break 9934 } 9935 v.reset(OpARM64MOVWstore) 9936 v.AuxInt = i - 3 9937 v.Aux = s 9938 v.AddArg(ptr) 9939 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 9940 v0.AddArg(w) 9941 v.AddArg(v0) 9942 v.AddArg(mem) 9943 return true 9944 } 9945 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem)))) 9946 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 9947 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 9948 for { 9949 if v.AuxInt != 3 { 9950 break 9951 } 9952 s := v.Aux 9953 _ = v.Args[2] 9954 p := v.Args[0] 9955 w := v.Args[1] 9956 x0 := v.Args[2] 9957 if x0.Op != OpARM64MOVBstore || x0.AuxInt != 2 || x0.Aux != s { 9958 break 9959 } 9960 _ = x0.Args[2] 9961 if p != x0.Args[0] { 9962 break 9963 } 9964 x0_1 := x0.Args[1] 9965 if x0_1.Op != OpARM64SRLconst || x0_1.AuxInt != 8 || w != x0_1.Args[0] { 9966 break 9967 } 9968 x1 := x0.Args[2] 9969 if x1.Op != OpARM64MOVBstore || x1.AuxInt != 1 || x1.Aux != s { 9970 break 9971 } 9972 _ = x1.Args[2] 9973 p1 := x1.Args[0] 9974 if p1.Op != OpARM64ADD { 9975 break 9976 } 9977 idx1 := p1.Args[1] 9978 ptr1 := p1.Args[0] 9979 x1_1 := x1.Args[1] 9980 if x1_1.Op != OpARM64SRLconst || x1_1.AuxInt != 16 || w != x1_1.Args[0] { 9981 break 9982 } 9983 x2 := x1.Args[2] 9984 if x2.Op != OpARM64MOVBstoreidx { 9985 break 9986 } 9987 mem := x2.Args[3] 9988 ptr0 := x2.Args[0] 9989 idx0 := x2.Args[1] 9990 x2_2 := x2.Args[2] 
9991 if x2_2.Op != OpARM64SRLconst || x2_2.AuxInt != 24 || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 9992 break 9993 } 9994 v.reset(OpARM64MOVWstoreidx) 9995 v.AddArg(ptr0) 9996 v.AddArg(idx0) 9997 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 9998 v0.AddArg(w) 9999 v.AddArg(v0) 10000 v.AddArg(mem) 10001 return true 10002 } 10003 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem)) 10004 // cond: x.Uses == 1 && clobber(x) 10005 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 10006 for { 10007 i := v.AuxInt 10008 s := v.Aux 10009 _ = v.Args[2] 10010 ptr := v.Args[0] 10011 w := v.Args[1] 10012 x := v.Args[2] 10013 if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s { 10014 break 10015 } 10016 mem := x.Args[2] 10017 if ptr != x.Args[0] { 10018 break 10019 } 10020 x_1 := x.Args[1] 10021 if x_1.Op != OpARM64SRLconst || x_1.AuxInt != 8 || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) { 10022 break 10023 } 10024 v.reset(OpARM64MOVHstore) 10025 v.AuxInt = i - 1 10026 v.Aux = s 10027 v.AddArg(ptr) 10028 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 10029 v0.AddArg(w) 10030 v.AddArg(v0) 10031 v.AddArg(mem) 10032 return true 10033 } 10034 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem)) 10035 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10036 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 10037 for { 10038 if v.AuxInt != 1 { 10039 break 10040 } 10041 s := v.Aux 10042 _ = v.Args[2] 10043 v_0 := v.Args[0] 10044 if v_0.Op != OpARM64ADD { 10045 break 10046 } 10047 idx1 := v_0.Args[1] 10048 ptr1 := v_0.Args[0] 10049 w := v.Args[1] 10050 x := v.Args[2] 10051 
if x.Op != OpARM64MOVBstoreidx { 10052 break 10053 } 10054 mem := x.Args[3] 10055 ptr0 := x.Args[0] 10056 idx0 := x.Args[1] 10057 x_2 := x.Args[2] 10058 if x_2.Op != OpARM64SRLconst || x_2.AuxInt != 8 || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10059 break 10060 } 10061 v.reset(OpARM64MOVHstoreidx) 10062 v.AddArg(ptr0) 10063 v.AddArg(idx0) 10064 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10065 v0.AddArg(w) 10066 v.AddArg(v0) 10067 v.AddArg(mem) 10068 return true 10069 } 10070 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 8)] w) mem)) 10071 // cond: x.Uses == 1 && clobber(x) 10072 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 10073 for { 10074 i := v.AuxInt 10075 s := v.Aux 10076 _ = v.Args[2] 10077 ptr := v.Args[0] 10078 w := v.Args[1] 10079 x := v.Args[2] 10080 if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s { 10081 break 10082 } 10083 mem := x.Args[2] 10084 if ptr != x.Args[0] { 10085 break 10086 } 10087 x_1 := x.Args[1] 10088 if x_1.Op != OpARM64UBFX || x_1.AuxInt != armBFAuxInt(8, 8) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) { 10089 break 10090 } 10091 v.reset(OpARM64MOVHstore) 10092 v.AuxInt = i - 1 10093 v.Aux = s 10094 v.AddArg(ptr) 10095 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 10096 v0.AddArg(w) 10097 v.AddArg(v0) 10098 v.AddArg(mem) 10099 return true 10100 } 10101 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 8)] w) mem)) 10102 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10103 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 10104 for { 10105 if v.AuxInt != 1 { 10106 break 10107 } 10108 s := v.Aux 10109 _ = v.Args[2] 10110 v_0 := v.Args[0] 10111 if v_0.Op != OpARM64ADD { 10112 break 
10113 } 10114 idx1 := v_0.Args[1] 10115 ptr1 := v_0.Args[0] 10116 w := v.Args[1] 10117 x := v.Args[2] 10118 if x.Op != OpARM64MOVBstoreidx { 10119 break 10120 } 10121 mem := x.Args[3] 10122 ptr0 := x.Args[0] 10123 idx0 := x.Args[1] 10124 x_2 := x.Args[2] 10125 if x_2.Op != OpARM64UBFX || x_2.AuxInt != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10126 break 10127 } 10128 v.reset(OpARM64MOVHstoreidx) 10129 v.AddArg(ptr0) 10130 v.AddArg(idx0) 10131 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10132 v0.AddArg(w) 10133 v.AddArg(v0) 10134 v.AddArg(mem) 10135 return true 10136 } 10137 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 10138 // cond: x.Uses == 1 && clobber(x) 10139 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 10140 for { 10141 i := v.AuxInt 10142 s := v.Aux 10143 _ = v.Args[2] 10144 ptr := v.Args[0] 10145 w := v.Args[1] 10146 x := v.Args[2] 10147 if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s { 10148 break 10149 } 10150 mem := x.Args[2] 10151 if ptr != x.Args[0] { 10152 break 10153 } 10154 x_1 := x.Args[1] 10155 if x_1.Op != OpARM64SRLconst || x_1.AuxInt != 8 { 10156 break 10157 } 10158 x_1_0 := x_1.Args[0] 10159 if x_1_0.Op != OpARM64MOVDreg || w != x_1_0.Args[0] || !(x.Uses == 1 && clobber(x)) { 10160 break 10161 } 10162 v.reset(OpARM64MOVHstore) 10163 v.AuxInt = i - 1 10164 v.Aux = s 10165 v.AddArg(ptr) 10166 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 10167 v0.AddArg(w) 10168 v.AddArg(v0) 10169 v.AddArg(mem) 10170 return true 10171 } 10172 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 10173 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10174 // result: (MOVHstoreidx ptr0 
idx0 (REV16W <w.Type> w) mem) 10175 for { 10176 if v.AuxInt != 1 { 10177 break 10178 } 10179 s := v.Aux 10180 _ = v.Args[2] 10181 v_0 := v.Args[0] 10182 if v_0.Op != OpARM64ADD { 10183 break 10184 } 10185 idx1 := v_0.Args[1] 10186 ptr1 := v_0.Args[0] 10187 w := v.Args[1] 10188 x := v.Args[2] 10189 if x.Op != OpARM64MOVBstoreidx { 10190 break 10191 } 10192 mem := x.Args[3] 10193 ptr0 := x.Args[0] 10194 idx0 := x.Args[1] 10195 x_2 := x.Args[2] 10196 if x_2.Op != OpARM64SRLconst || x_2.AuxInt != 8 { 10197 break 10198 } 10199 x_2_0 := x_2.Args[0] 10200 if x_2_0.Op != OpARM64MOVDreg || w != x_2_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10201 break 10202 } 10203 v.reset(OpARM64MOVHstoreidx) 10204 v.AddArg(ptr0) 10205 v.AddArg(idx0) 10206 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10207 v0.AddArg(w) 10208 v.AddArg(v0) 10209 v.AddArg(mem) 10210 return true 10211 } 10212 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [armBFAuxInt(8, 24)] w) mem)) 10213 // cond: x.Uses == 1 && clobber(x) 10214 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 10215 for { 10216 i := v.AuxInt 10217 s := v.Aux 10218 _ = v.Args[2] 10219 ptr := v.Args[0] 10220 w := v.Args[1] 10221 x := v.Args[2] 10222 if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s { 10223 break 10224 } 10225 mem := x.Args[2] 10226 if ptr != x.Args[0] { 10227 break 10228 } 10229 x_1 := x.Args[1] 10230 if x_1.Op != OpARM64UBFX || x_1.AuxInt != armBFAuxInt(8, 24) || w != x_1.Args[0] || !(x.Uses == 1 && clobber(x)) { 10231 break 10232 } 10233 v.reset(OpARM64MOVHstore) 10234 v.AuxInt = i - 1 10235 v.Aux = s 10236 v.AddArg(ptr) 10237 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 10238 v0.AddArg(w) 10239 v.AddArg(v0) 10240 v.AddArg(mem) 10241 return true 10242 } 10243 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [armBFAuxInt(8, 24)] w) 
mem)) 10244 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10245 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 10246 for { 10247 if v.AuxInt != 1 { 10248 break 10249 } 10250 s := v.Aux 10251 _ = v.Args[2] 10252 v_0 := v.Args[0] 10253 if v_0.Op != OpARM64ADD { 10254 break 10255 } 10256 idx1 := v_0.Args[1] 10257 ptr1 := v_0.Args[0] 10258 w := v.Args[1] 10259 x := v.Args[2] 10260 if x.Op != OpARM64MOVBstoreidx { 10261 break 10262 } 10263 mem := x.Args[3] 10264 ptr0 := x.Args[0] 10265 idx0 := x.Args[1] 10266 x_2 := x.Args[2] 10267 if x_2.Op != OpARM64UBFX || x_2.AuxInt != armBFAuxInt(8, 24) || w != x_2.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10268 break 10269 } 10270 v.reset(OpARM64MOVHstoreidx) 10271 v.AddArg(ptr0) 10272 v.AddArg(idx0) 10273 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10274 v0.AddArg(w) 10275 v.AddArg(v0) 10276 v.AddArg(mem) 10277 return true 10278 } 10279 return false 10280 } 10281 func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool { 10282 b := v.Block 10283 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 10284 // cond: x.Uses == 1 && clobber(x) 10285 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 10286 for { 10287 i := v.AuxInt 10288 s := v.Aux 10289 _ = v.Args[2] 10290 ptr := v.Args[0] 10291 w := v.Args[1] 10292 x := v.Args[2] 10293 if x.Op != OpARM64MOVBstore || x.AuxInt != i-1 || x.Aux != s { 10294 break 10295 } 10296 mem := x.Args[2] 10297 if ptr != x.Args[0] { 10298 break 10299 } 10300 x_1 := x.Args[1] 10301 if x_1.Op != OpARM64SRLconst || x_1.AuxInt != 8 { 10302 break 10303 } 10304 x_1_0 := x_1.Args[0] 10305 if x_1_0.Op != OpARM64MOVDreg || w != x_1_0.Args[0] || !(x.Uses == 1 && clobber(x)) { 10306 break 10307 } 10308 v.reset(OpARM64MOVHstore) 
10309 v.AuxInt = i - 1 10310 v.Aux = s 10311 v.AddArg(ptr) 10312 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 10313 v0.AddArg(w) 10314 v.AddArg(v0) 10315 v.AddArg(mem) 10316 return true 10317 } 10318 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 10319 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10320 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 10321 for { 10322 if v.AuxInt != 1 { 10323 break 10324 } 10325 s := v.Aux 10326 _ = v.Args[2] 10327 v_0 := v.Args[0] 10328 if v_0.Op != OpARM64ADD { 10329 break 10330 } 10331 idx1 := v_0.Args[1] 10332 ptr1 := v_0.Args[0] 10333 w := v.Args[1] 10334 x := v.Args[2] 10335 if x.Op != OpARM64MOVBstoreidx { 10336 break 10337 } 10338 mem := x.Args[3] 10339 ptr0 := x.Args[0] 10340 idx0 := x.Args[1] 10341 x_2 := x.Args[2] 10342 if x_2.Op != OpARM64SRLconst || x_2.AuxInt != 8 { 10343 break 10344 } 10345 x_2_0 := x_2.Args[0] 10346 if x_2_0.Op != OpARM64MOVDreg || w != x_2_0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10347 break 10348 } 10349 v.reset(OpARM64MOVHstoreidx) 10350 v.AddArg(ptr0) 10351 v.AddArg(idx0) 10352 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10353 v0.AddArg(w) 10354 v.AddArg(v0) 10355 v.AddArg(mem) 10356 return true 10357 } 10358 return false 10359 } 10360 func rewriteValueARM64_OpARM64MOVBstoreidx_0(v *Value) bool { 10361 // match: (MOVBstoreidx ptr (MOVDconst [c]) val mem) 10362 // result: (MOVBstore [c] ptr val mem) 10363 for { 10364 mem := v.Args[3] 10365 ptr := v.Args[0] 10366 v_1 := v.Args[1] 10367 if v_1.Op != OpARM64MOVDconst { 10368 break 10369 } 10370 c := v_1.AuxInt 10371 val := v.Args[2] 10372 v.reset(OpARM64MOVBstore) 10373 v.AuxInt = c 10374 v.AddArg(ptr) 10375 v.AddArg(val) 10376 v.AddArg(mem) 10377 return 
true 10378 } 10379 // match: (MOVBstoreidx (MOVDconst [c]) idx val mem) 10380 // result: (MOVBstore [c] idx val mem) 10381 for { 10382 mem := v.Args[3] 10383 v_0 := v.Args[0] 10384 if v_0.Op != OpARM64MOVDconst { 10385 break 10386 } 10387 c := v_0.AuxInt 10388 idx := v.Args[1] 10389 val := v.Args[2] 10390 v.reset(OpARM64MOVBstore) 10391 v.AuxInt = c 10392 v.AddArg(idx) 10393 v.AddArg(val) 10394 v.AddArg(mem) 10395 return true 10396 } 10397 // match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem) 10398 // result: (MOVBstorezeroidx ptr idx mem) 10399 for { 10400 mem := v.Args[3] 10401 ptr := v.Args[0] 10402 idx := v.Args[1] 10403 v_2 := v.Args[2] 10404 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 10405 break 10406 } 10407 v.reset(OpARM64MOVBstorezeroidx) 10408 v.AddArg(ptr) 10409 v.AddArg(idx) 10410 v.AddArg(mem) 10411 return true 10412 } 10413 // match: (MOVBstoreidx ptr idx (MOVBreg x) mem) 10414 // result: (MOVBstoreidx ptr idx x mem) 10415 for { 10416 mem := v.Args[3] 10417 ptr := v.Args[0] 10418 idx := v.Args[1] 10419 v_2 := v.Args[2] 10420 if v_2.Op != OpARM64MOVBreg { 10421 break 10422 } 10423 x := v_2.Args[0] 10424 v.reset(OpARM64MOVBstoreidx) 10425 v.AddArg(ptr) 10426 v.AddArg(idx) 10427 v.AddArg(x) 10428 v.AddArg(mem) 10429 return true 10430 } 10431 // match: (MOVBstoreidx ptr idx (MOVBUreg x) mem) 10432 // result: (MOVBstoreidx ptr idx x mem) 10433 for { 10434 mem := v.Args[3] 10435 ptr := v.Args[0] 10436 idx := v.Args[1] 10437 v_2 := v.Args[2] 10438 if v_2.Op != OpARM64MOVBUreg { 10439 break 10440 } 10441 x := v_2.Args[0] 10442 v.reset(OpARM64MOVBstoreidx) 10443 v.AddArg(ptr) 10444 v.AddArg(idx) 10445 v.AddArg(x) 10446 v.AddArg(mem) 10447 return true 10448 } 10449 // match: (MOVBstoreidx ptr idx (MOVHreg x) mem) 10450 // result: (MOVBstoreidx ptr idx x mem) 10451 for { 10452 mem := v.Args[3] 10453 ptr := v.Args[0] 10454 idx := v.Args[1] 10455 v_2 := v.Args[2] 10456 if v_2.Op != OpARM64MOVHreg { 10457 break 10458 } 10459 x := v_2.Args[0] 10460 
v.reset(OpARM64MOVBstoreidx) 10461 v.AddArg(ptr) 10462 v.AddArg(idx) 10463 v.AddArg(x) 10464 v.AddArg(mem) 10465 return true 10466 } 10467 // match: (MOVBstoreidx ptr idx (MOVHUreg x) mem) 10468 // result: (MOVBstoreidx ptr idx x mem) 10469 for { 10470 mem := v.Args[3] 10471 ptr := v.Args[0] 10472 idx := v.Args[1] 10473 v_2 := v.Args[2] 10474 if v_2.Op != OpARM64MOVHUreg { 10475 break 10476 } 10477 x := v_2.Args[0] 10478 v.reset(OpARM64MOVBstoreidx) 10479 v.AddArg(ptr) 10480 v.AddArg(idx) 10481 v.AddArg(x) 10482 v.AddArg(mem) 10483 return true 10484 } 10485 // match: (MOVBstoreidx ptr idx (MOVWreg x) mem) 10486 // result: (MOVBstoreidx ptr idx x mem) 10487 for { 10488 mem := v.Args[3] 10489 ptr := v.Args[0] 10490 idx := v.Args[1] 10491 v_2 := v.Args[2] 10492 if v_2.Op != OpARM64MOVWreg { 10493 break 10494 } 10495 x := v_2.Args[0] 10496 v.reset(OpARM64MOVBstoreidx) 10497 v.AddArg(ptr) 10498 v.AddArg(idx) 10499 v.AddArg(x) 10500 v.AddArg(mem) 10501 return true 10502 } 10503 // match: (MOVBstoreidx ptr idx (MOVWUreg x) mem) 10504 // result: (MOVBstoreidx ptr idx x mem) 10505 for { 10506 mem := v.Args[3] 10507 ptr := v.Args[0] 10508 idx := v.Args[1] 10509 v_2 := v.Args[2] 10510 if v_2.Op != OpARM64MOVWUreg { 10511 break 10512 } 10513 x := v_2.Args[0] 10514 v.reset(OpARM64MOVBstoreidx) 10515 v.AddArg(ptr) 10516 v.AddArg(idx) 10517 v.AddArg(x) 10518 v.AddArg(mem) 10519 return true 10520 } 10521 // match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem)) 10522 // cond: x.Uses == 1 && clobber(x) 10523 // result: (MOVHstoreidx ptr idx w mem) 10524 for { 10525 _ = v.Args[3] 10526 ptr := v.Args[0] 10527 v_1 := v.Args[1] 10528 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 1 { 10529 break 10530 } 10531 idx := v_1.Args[0] 10532 v_2 := v.Args[2] 10533 if v_2.Op != OpARM64SRLconst || v_2.AuxInt != 8 { 10534 break 10535 } 10536 w := v_2.Args[0] 10537 x := v.Args[3] 10538 if x.Op != OpARM64MOVBstoreidx { 10539 break 10540 } 10541 mem := x.Args[3] 
10542 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) { 10543 break 10544 } 10545 v.reset(OpARM64MOVHstoreidx) 10546 v.AddArg(ptr) 10547 v.AddArg(idx) 10548 v.AddArg(w) 10549 v.AddArg(mem) 10550 return true 10551 } 10552 return false 10553 } 10554 func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool { 10555 b := v.Block 10556 // match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(24, 8)] w) mem)))) 10557 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10558 // result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem) 10559 for { 10560 _ = v.Args[3] 10561 ptr := v.Args[0] 10562 v_1 := v.Args[1] 10563 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 3 { 10564 break 10565 } 10566 idx := v_1.Args[0] 10567 w := v.Args[2] 10568 x0 := v.Args[3] 10569 if x0.Op != OpARM64MOVBstoreidx { 10570 break 10571 } 10572 _ = x0.Args[3] 10573 if ptr != x0.Args[0] { 10574 break 10575 } 10576 x0_1 := x0.Args[1] 10577 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 2 || idx != x0_1.Args[0] { 10578 break 10579 } 10580 x0_2 := x0.Args[2] 10581 if x0_2.Op != OpARM64UBFX || x0_2.AuxInt != armBFAuxInt(8, 24) || w != x0_2.Args[0] { 10582 break 10583 } 10584 x1 := x0.Args[3] 10585 if x1.Op != OpARM64MOVBstoreidx { 10586 break 10587 } 10588 _ = x1.Args[3] 10589 if ptr != x1.Args[0] { 10590 break 10591 } 10592 x1_1 := x1.Args[1] 10593 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] { 10594 break 10595 } 10596 x1_2 := x1.Args[2] 10597 if x1_2.Op != OpARM64UBFX || x1_2.AuxInt != armBFAuxInt(16, 16) || w != x1_2.Args[0] { 10598 break 10599 } 10600 x2 := x1.Args[3] 10601 if x2.Op != OpARM64MOVBstoreidx { 10602 break 10603 } 10604 mem := x2.Args[3] 10605 if ptr != x2.Args[0] || idx != x2.Args[1] { 10606 break 
10607 } 10608 x2_2 := x2.Args[2] 10609 if x2_2.Op != OpARM64UBFX || x2_2.AuxInt != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 10610 break 10611 } 10612 v.reset(OpARM64MOVWstoreidx) 10613 v.AddArg(ptr) 10614 v.AddArg(idx) 10615 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 10616 v0.AddArg(w) 10617 v.AddArg(v0) 10618 v.AddArg(mem) 10619 return true 10620 } 10621 // match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [armBFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [armBFAuxInt(24, 8)] w) mem)))) 10622 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10623 // result: (MOVWstoreidx ptr idx w mem) 10624 for { 10625 _ = v.Args[3] 10626 ptr := v.Args[0] 10627 idx := v.Args[1] 10628 w := v.Args[2] 10629 x0 := v.Args[3] 10630 if x0.Op != OpARM64MOVBstoreidx { 10631 break 10632 } 10633 _ = x0.Args[3] 10634 if ptr != x0.Args[0] { 10635 break 10636 } 10637 x0_1 := x0.Args[1] 10638 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 1 || idx != x0_1.Args[0] { 10639 break 10640 } 10641 x0_2 := x0.Args[2] 10642 if x0_2.Op != OpARM64UBFX || x0_2.AuxInt != armBFAuxInt(8, 24) || w != x0_2.Args[0] { 10643 break 10644 } 10645 x1 := x0.Args[3] 10646 if x1.Op != OpARM64MOVBstoreidx { 10647 break 10648 } 10649 _ = x1.Args[3] 10650 if ptr != x1.Args[0] { 10651 break 10652 } 10653 x1_1 := x1.Args[1] 10654 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 2 || idx != x1_1.Args[0] { 10655 break 10656 } 10657 x1_2 := x1.Args[2] 10658 if x1_2.Op != OpARM64UBFX || x1_2.AuxInt != armBFAuxInt(16, 16) || w != x1_2.Args[0] { 10659 break 10660 } 10661 x2 := x1.Args[3] 10662 if x2.Op != OpARM64MOVBstoreidx { 10663 break 10664 } 10665 mem := x2.Args[3] 10666 if ptr != x2.Args[0] { 10667 break 10668 } 10669 x2_1 := x2.Args[1] 10670 if x2_1.Op != 
OpARM64ADDconst || x2_1.AuxInt != 3 || idx != x2_1.Args[0] { 10671 break 10672 } 10673 x2_2 := x2.Args[2] 10674 if x2_2.Op != OpARM64UBFX || x2_2.AuxInt != armBFAuxInt(24, 8) || w != x2_2.Args[0] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 10675 break 10676 } 10677 v.reset(OpARM64MOVWstoreidx) 10678 v.AddArg(ptr) 10679 v.AddArg(idx) 10680 v.AddArg(w) 10681 v.AddArg(mem) 10682 return true 10683 } 10684 // match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [armBFAuxInt(8, 8)] w) mem)) 10685 // cond: x.Uses == 1 && clobber(x) 10686 // result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem) 10687 for { 10688 _ = v.Args[3] 10689 ptr := v.Args[0] 10690 v_1 := v.Args[1] 10691 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 1 { 10692 break 10693 } 10694 idx := v_1.Args[0] 10695 w := v.Args[2] 10696 x := v.Args[3] 10697 if x.Op != OpARM64MOVBstoreidx { 10698 break 10699 } 10700 mem := x.Args[3] 10701 if ptr != x.Args[0] || idx != x.Args[1] { 10702 break 10703 } 10704 x_2 := x.Args[2] 10705 if x_2.Op != OpARM64UBFX || x_2.AuxInt != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) { 10706 break 10707 } 10708 v.reset(OpARM64MOVHstoreidx) 10709 v.AddArg(ptr) 10710 v.AddArg(idx) 10711 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 10712 v0.AddArg(w) 10713 v.AddArg(v0) 10714 v.AddArg(mem) 10715 return true 10716 } 10717 // match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [armBFAuxInt(8, 8)] w) mem)) 10718 // cond: x.Uses == 1 && clobber(x) 10719 // result: (MOVHstoreidx ptr idx w mem) 10720 for { 10721 _ = v.Args[3] 10722 ptr := v.Args[0] 10723 idx := v.Args[1] 10724 w := v.Args[2] 10725 x := v.Args[3] 10726 if x.Op != OpARM64MOVBstoreidx { 10727 break 10728 } 10729 mem := x.Args[3] 10730 if ptr != x.Args[0] { 10731 break 10732 } 10733 x_1 := x.Args[1] 10734 if x_1.Op != OpARM64ADDconst || x_1.AuxInt != 1 || idx != x_1.Args[0] { 10735 break 10736 } 10737 
x_2 := x.Args[2] 10738 if x_2.Op != OpARM64UBFX || x_2.AuxInt != armBFAuxInt(8, 8) || w != x_2.Args[0] || !(x.Uses == 1 && clobber(x)) { 10739 break 10740 } 10741 v.reset(OpARM64MOVHstoreidx) 10742 v.AddArg(ptr) 10743 v.AddArg(idx) 10744 v.AddArg(w) 10745 v.AddArg(mem) 10746 return true 10747 } 10748 return false 10749 } 10750 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 10751 b := v.Block 10752 config := b.Func.Config 10753 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 10754 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 10755 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 10756 for { 10757 off1 := v.AuxInt 10758 sym := v.Aux 10759 mem := v.Args[1] 10760 v_0 := v.Args[0] 10761 if v_0.Op != OpARM64ADDconst { 10762 break 10763 } 10764 off2 := v_0.AuxInt 10765 ptr := v_0.Args[0] 10766 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 10767 break 10768 } 10769 v.reset(OpARM64MOVBstorezero) 10770 v.AuxInt = off1 + off2 10771 v.Aux = sym 10772 v.AddArg(ptr) 10773 v.AddArg(mem) 10774 return true 10775 } 10776 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 10777 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 10778 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 10779 for { 10780 off1 := v.AuxInt 10781 sym1 := v.Aux 10782 mem := v.Args[1] 10783 v_0 := v.Args[0] 10784 if v_0.Op != OpARM64MOVDaddr { 10785 break 10786 } 10787 off2 := v_0.AuxInt 10788 sym2 := v_0.Aux 10789 ptr := v_0.Args[0] 10790 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 10791 break 10792 } 10793 v.reset(OpARM64MOVBstorezero) 10794 v.AuxInt = off1 + off2 10795 v.Aux = mergeSym(sym1, sym2) 10796 v.AddArg(ptr) 10797 v.AddArg(mem) 10798 return true 10799 } 10800 // match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem) 10801 // cond: off == 0 && sym == nil 
10802 // result: (MOVBstorezeroidx ptr idx mem) 10803 for { 10804 off := v.AuxInt 10805 sym := v.Aux 10806 mem := v.Args[1] 10807 v_0 := v.Args[0] 10808 if v_0.Op != OpARM64ADD { 10809 break 10810 } 10811 idx := v_0.Args[1] 10812 ptr := v_0.Args[0] 10813 if !(off == 0 && sym == nil) { 10814 break 10815 } 10816 v.reset(OpARM64MOVBstorezeroidx) 10817 v.AddArg(ptr) 10818 v.AddArg(idx) 10819 v.AddArg(mem) 10820 return true 10821 } 10822 // match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem)) 10823 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,1) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 10824 // result: (MOVHstorezero [min(i,j)] {s} ptr0 mem) 10825 for { 10826 i := v.AuxInt 10827 s := v.Aux 10828 _ = v.Args[1] 10829 ptr0 := v.Args[0] 10830 x := v.Args[1] 10831 if x.Op != OpARM64MOVBstorezero { 10832 break 10833 } 10834 j := x.AuxInt 10835 if x.Aux != s { 10836 break 10837 } 10838 mem := x.Args[1] 10839 ptr1 := x.Args[0] 10840 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 10841 break 10842 } 10843 v.reset(OpARM64MOVHstorezero) 10844 v.AuxInt = min(i, j) 10845 v.Aux = s 10846 v.AddArg(ptr0) 10847 v.AddArg(mem) 10848 return true 10849 } 10850 // match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem)) 10851 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10852 // result: (MOVHstorezeroidx ptr1 idx1 mem) 10853 for { 10854 if v.AuxInt != 1 { 10855 break 10856 } 10857 s := v.Aux 10858 _ = v.Args[1] 10859 v_0 := v.Args[0] 10860 if v_0.Op != OpARM64ADD { 10861 break 10862 } 10863 idx0 := v_0.Args[1] 10864 ptr0 := v_0.Args[0] 10865 x := v.Args[1] 10866 if x.Op != OpARM64MOVBstorezeroidx { 10867 break 10868 } 10869 mem := x.Args[2] 10870 ptr1 := x.Args[0] 10871 idx1 := x.Args[1] 10872 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && 
isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVBstorezeroidx_0 applies the generated ARM64
// rewrite rules for MOVBstorezeroidx (indexed store of a zero byte). It
// mutates v in place and reports whether any rule fired. Machine-generated
// from gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v *Value) bool {
	// Constant index folds into the store's immediate offset.
	// match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem)
	// result: (MOVBstorezero [c] ptr mem)
	for {
		// NOTE(review): reading the last arg (mem) first appears to be the
		// generator's bounds-check hint for the Args slice — confirm in gen.
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// Symmetric case: constant base, variable index.
	// match: (MOVBstorezeroidx (MOVDconst [c]) idx mem)
	// result: (MOVBstorezero [c] idx mem)
	for {
		mem := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// Merge two adjacent zero-byte stores into one zero-halfword store; the
	// older store x must have no other uses so it can be clobbered.
	// match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 1 {
			break
		}
		idx := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstorezeroidx {
			break
		}
		mem := x.Args[2]
		if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDload_0 applies the generated ARM64 rewrite
// rules for MOVDload (8-byte load). Machine-generated from gen/ARM64.rules —
// do not hand-edit.
func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// Forward a just-stored FP value directly to the integer register file.
	// match: (MOVDload [off] {sym} ptr (FMOVDstore [off]
{sym} ptr val _)) 10947 // result: (FMOVDfpgp val) 10948 for { 10949 off := v.AuxInt 10950 sym := v.Aux 10951 _ = v.Args[1] 10952 ptr := v.Args[0] 10953 v_1 := v.Args[1] 10954 if v_1.Op != OpARM64FMOVDstore || v_1.AuxInt != off || v_1.Aux != sym { 10955 break 10956 } 10957 _ = v_1.Args[2] 10958 if ptr != v_1.Args[0] { 10959 break 10960 } 10961 val := v_1.Args[1] 10962 v.reset(OpARM64FMOVDfpgp) 10963 v.AddArg(val) 10964 return true 10965 } 10966 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 10967 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 10968 // result: (MOVDload [off1+off2] {sym} ptr mem) 10969 for { 10970 off1 := v.AuxInt 10971 sym := v.Aux 10972 mem := v.Args[1] 10973 v_0 := v.Args[0] 10974 if v_0.Op != OpARM64ADDconst { 10975 break 10976 } 10977 off2 := v_0.AuxInt 10978 ptr := v_0.Args[0] 10979 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 10980 break 10981 } 10982 v.reset(OpARM64MOVDload) 10983 v.AuxInt = off1 + off2 10984 v.Aux = sym 10985 v.AddArg(ptr) 10986 v.AddArg(mem) 10987 return true 10988 } 10989 // match: (MOVDload [off] {sym} (ADD ptr idx) mem) 10990 // cond: off == 0 && sym == nil 10991 // result: (MOVDloadidx ptr idx mem) 10992 for { 10993 off := v.AuxInt 10994 sym := v.Aux 10995 mem := v.Args[1] 10996 v_0 := v.Args[0] 10997 if v_0.Op != OpARM64ADD { 10998 break 10999 } 11000 idx := v_0.Args[1] 11001 ptr := v_0.Args[0] 11002 if !(off == 0 && sym == nil) { 11003 break 11004 } 11005 v.reset(OpARM64MOVDloadidx) 11006 v.AddArg(ptr) 11007 v.AddArg(idx) 11008 v.AddArg(mem) 11009 return true 11010 } 11011 // match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem) 11012 // cond: off == 0 && sym == nil 11013 // result: (MOVDloadidx8 ptr idx mem) 11014 for { 11015 off := v.AuxInt 11016 sym := v.Aux 11017 mem := v.Args[1] 11018 v_0 := v.Args[0] 11019 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 3 { 11020 break 11021 } 11022 idx := v_0.Args[1] 11023 ptr := v_0.Args[0] 11024 if 
!(off == 0 && sym == nil) { 11025 break 11026 } 11027 v.reset(OpARM64MOVDloadidx8) 11028 v.AddArg(ptr) 11029 v.AddArg(idx) 11030 v.AddArg(mem) 11031 return true 11032 } 11033 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 11034 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11035 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 11036 for { 11037 off1 := v.AuxInt 11038 sym1 := v.Aux 11039 mem := v.Args[1] 11040 v_0 := v.Args[0] 11041 if v_0.Op != OpARM64MOVDaddr { 11042 break 11043 } 11044 off2 := v_0.AuxInt 11045 sym2 := v_0.Aux 11046 ptr := v_0.Args[0] 11047 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11048 break 11049 } 11050 v.reset(OpARM64MOVDload) 11051 v.AuxInt = off1 + off2 11052 v.Aux = mergeSym(sym1, sym2) 11053 v.AddArg(ptr) 11054 v.AddArg(mem) 11055 return true 11056 } 11057 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 11058 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 11059 // result: (MOVDconst [0]) 11060 for { 11061 off := v.AuxInt 11062 sym := v.Aux 11063 _ = v.Args[1] 11064 ptr := v.Args[0] 11065 v_1 := v.Args[1] 11066 if v_1.Op != OpARM64MOVDstorezero { 11067 break 11068 } 11069 off2 := v_1.AuxInt 11070 sym2 := v_1.Aux 11071 _ = v_1.Args[1] 11072 ptr2 := v_1.Args[0] 11073 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 11074 break 11075 } 11076 v.reset(OpARM64MOVDconst) 11077 v.AuxInt = 0 11078 return true 11079 } 11080 // match: (MOVDload [off] {sym} (SB) _) 11081 // cond: symIsRO(sym) 11082 // result: (MOVDconst [int64(read64(sym, off, config.BigEndian))]) 11083 for { 11084 off := v.AuxInt 11085 sym := v.Aux 11086 _ = v.Args[1] 11087 v_0 := v.Args[0] 11088 if v_0.Op != OpSB || !(symIsRO(sym)) { 11089 break 11090 } 11091 v.reset(OpARM64MOVDconst) 11092 v.AuxInt = int64(read64(sym, off, config.BigEndian)) 11093 return true 11094 } 11095 return 
false 11096 } 11097 func rewriteValueARM64_OpARM64MOVDloadidx_0(v *Value) bool { 11098 // match: (MOVDloadidx ptr (MOVDconst [c]) mem) 11099 // result: (MOVDload [c] ptr mem) 11100 for { 11101 mem := v.Args[2] 11102 ptr := v.Args[0] 11103 v_1 := v.Args[1] 11104 if v_1.Op != OpARM64MOVDconst { 11105 break 11106 } 11107 c := v_1.AuxInt 11108 v.reset(OpARM64MOVDload) 11109 v.AuxInt = c 11110 v.AddArg(ptr) 11111 v.AddArg(mem) 11112 return true 11113 } 11114 // match: (MOVDloadidx (MOVDconst [c]) ptr mem) 11115 // result: (MOVDload [c] ptr mem) 11116 for { 11117 mem := v.Args[2] 11118 v_0 := v.Args[0] 11119 if v_0.Op != OpARM64MOVDconst { 11120 break 11121 } 11122 c := v_0.AuxInt 11123 ptr := v.Args[1] 11124 v.reset(OpARM64MOVDload) 11125 v.AuxInt = c 11126 v.AddArg(ptr) 11127 v.AddArg(mem) 11128 return true 11129 } 11130 // match: (MOVDloadidx ptr (SLLconst [3] idx) mem) 11131 // result: (MOVDloadidx8 ptr idx mem) 11132 for { 11133 mem := v.Args[2] 11134 ptr := v.Args[0] 11135 v_1 := v.Args[1] 11136 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 3 { 11137 break 11138 } 11139 idx := v_1.Args[0] 11140 v.reset(OpARM64MOVDloadidx8) 11141 v.AddArg(ptr) 11142 v.AddArg(idx) 11143 v.AddArg(mem) 11144 return true 11145 } 11146 // match: (MOVDloadidx (SLLconst [3] idx) ptr mem) 11147 // result: (MOVDloadidx8 ptr idx mem) 11148 for { 11149 mem := v.Args[2] 11150 v_0 := v.Args[0] 11151 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 3 { 11152 break 11153 } 11154 idx := v_0.Args[0] 11155 ptr := v.Args[1] 11156 v.reset(OpARM64MOVDloadidx8) 11157 v.AddArg(ptr) 11158 v.AddArg(idx) 11159 v.AddArg(mem) 11160 return true 11161 } 11162 // match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _)) 11163 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 11164 // result: (MOVDconst [0]) 11165 for { 11166 _ = v.Args[2] 11167 ptr := v.Args[0] 11168 idx := v.Args[1] 11169 v_2 := v.Args[2] 11170 if v_2.Op != OpARM64MOVDstorezeroidx { 11171 
break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDloadidx8_0 applies the generated ARM64 rewrite
// rules for MOVDloadidx8 (8-byte load with index scaled by 8). It mutates v
// in place and reports whether any rule fired. Machine-generated from
// gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64MOVDloadidx8_0(v *Value) bool {
	// Constant scaled index folds into the load's immediate offset (c<<3).
	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
	// result: (MOVDload [c<<3] ptr mem)
	for {
		mem := v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDload)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// Load from a location just stored zero at the same (ptr, idx): the
	// result is the constant 0.
	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDreg_0 applies the generated ARM64 rewrite
// rules for MOVDreg (register-to-register move). Machine-generated from
// gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
	// A single-use register move degrades to a no-op.
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// Moving a constant is the constant itself.
	// match: (MOVDreg (MOVDconst [c]))
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstore_0 applies the generated ARM64 rewrite
// rules for MOVDstore (8-byte store). Machine-generated from gen/ARM64.rules —
// do not hand-edit.
func rewriteValueARM64_OpARM64MOVDstore_0(v
*Value) bool { 11253 b := v.Block 11254 config := b.Func.Config 11255 // match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem) 11256 // result: (FMOVDstore [off] {sym} ptr val mem) 11257 for { 11258 off := v.AuxInt 11259 sym := v.Aux 11260 mem := v.Args[2] 11261 ptr := v.Args[0] 11262 v_1 := v.Args[1] 11263 if v_1.Op != OpARM64FMOVDfpgp { 11264 break 11265 } 11266 val := v_1.Args[0] 11267 v.reset(OpARM64FMOVDstore) 11268 v.AuxInt = off 11269 v.Aux = sym 11270 v.AddArg(ptr) 11271 v.AddArg(val) 11272 v.AddArg(mem) 11273 return true 11274 } 11275 // match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 11276 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11277 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 11278 for { 11279 off1 := v.AuxInt 11280 sym := v.Aux 11281 mem := v.Args[2] 11282 v_0 := v.Args[0] 11283 if v_0.Op != OpARM64ADDconst { 11284 break 11285 } 11286 off2 := v_0.AuxInt 11287 ptr := v_0.Args[0] 11288 val := v.Args[1] 11289 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11290 break 11291 } 11292 v.reset(OpARM64MOVDstore) 11293 v.AuxInt = off1 + off2 11294 v.Aux = sym 11295 v.AddArg(ptr) 11296 v.AddArg(val) 11297 v.AddArg(mem) 11298 return true 11299 } 11300 // match: (MOVDstore [off] {sym} (ADD ptr idx) val mem) 11301 // cond: off == 0 && sym == nil 11302 // result: (MOVDstoreidx ptr idx val mem) 11303 for { 11304 off := v.AuxInt 11305 sym := v.Aux 11306 mem := v.Args[2] 11307 v_0 := v.Args[0] 11308 if v_0.Op != OpARM64ADD { 11309 break 11310 } 11311 idx := v_0.Args[1] 11312 ptr := v_0.Args[0] 11313 val := v.Args[1] 11314 if !(off == 0 && sym == nil) { 11315 break 11316 } 11317 v.reset(OpARM64MOVDstoreidx) 11318 v.AddArg(ptr) 11319 v.AddArg(idx) 11320 v.AddArg(val) 11321 v.AddArg(mem) 11322 return true 11323 } 11324 // match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem) 11325 // cond: off == 0 && sym == nil 11326 // result: (MOVDstoreidx8 ptr idx val mem) 11327 for { 
11328 off := v.AuxInt 11329 sym := v.Aux 11330 mem := v.Args[2] 11331 v_0 := v.Args[0] 11332 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 3 { 11333 break 11334 } 11335 idx := v_0.Args[1] 11336 ptr := v_0.Args[0] 11337 val := v.Args[1] 11338 if !(off == 0 && sym == nil) { 11339 break 11340 } 11341 v.reset(OpARM64MOVDstoreidx8) 11342 v.AddArg(ptr) 11343 v.AddArg(idx) 11344 v.AddArg(val) 11345 v.AddArg(mem) 11346 return true 11347 } 11348 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 11349 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11350 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 11351 for { 11352 off1 := v.AuxInt 11353 sym1 := v.Aux 11354 mem := v.Args[2] 11355 v_0 := v.Args[0] 11356 if v_0.Op != OpARM64MOVDaddr { 11357 break 11358 } 11359 off2 := v_0.AuxInt 11360 sym2 := v_0.Aux 11361 ptr := v_0.Args[0] 11362 val := v.Args[1] 11363 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11364 break 11365 } 11366 v.reset(OpARM64MOVDstore) 11367 v.AuxInt = off1 + off2 11368 v.Aux = mergeSym(sym1, sym2) 11369 v.AddArg(ptr) 11370 v.AddArg(val) 11371 v.AddArg(mem) 11372 return true 11373 } 11374 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 11375 // result: (MOVDstorezero [off] {sym} ptr mem) 11376 for { 11377 off := v.AuxInt 11378 sym := v.Aux 11379 mem := v.Args[2] 11380 ptr := v.Args[0] 11381 v_1 := v.Args[1] 11382 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 11383 break 11384 } 11385 v.reset(OpARM64MOVDstorezero) 11386 v.AuxInt = off 11387 v.Aux = sym 11388 v.AddArg(ptr) 11389 v.AddArg(mem) 11390 return true 11391 } 11392 return false 11393 } 11394 func rewriteValueARM64_OpARM64MOVDstoreidx_0(v *Value) bool { 11395 // match: (MOVDstoreidx ptr (MOVDconst [c]) val mem) 11396 // result: (MOVDstore [c] ptr val mem) 11397 for { 11398 mem := v.Args[3] 11399 ptr := v.Args[0] 11400 v_1 := v.Args[1] 
11401 if v_1.Op != OpARM64MOVDconst { 11402 break 11403 } 11404 c := v_1.AuxInt 11405 val := v.Args[2] 11406 v.reset(OpARM64MOVDstore) 11407 v.AuxInt = c 11408 v.AddArg(ptr) 11409 v.AddArg(val) 11410 v.AddArg(mem) 11411 return true 11412 } 11413 // match: (MOVDstoreidx (MOVDconst [c]) idx val mem) 11414 // result: (MOVDstore [c] idx val mem) 11415 for { 11416 mem := v.Args[3] 11417 v_0 := v.Args[0] 11418 if v_0.Op != OpARM64MOVDconst { 11419 break 11420 } 11421 c := v_0.AuxInt 11422 idx := v.Args[1] 11423 val := v.Args[2] 11424 v.reset(OpARM64MOVDstore) 11425 v.AuxInt = c 11426 v.AddArg(idx) 11427 v.AddArg(val) 11428 v.AddArg(mem) 11429 return true 11430 } 11431 // match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem) 11432 // result: (MOVDstoreidx8 ptr idx val mem) 11433 for { 11434 mem := v.Args[3] 11435 ptr := v.Args[0] 11436 v_1 := v.Args[1] 11437 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 3 { 11438 break 11439 } 11440 idx := v_1.Args[0] 11441 val := v.Args[2] 11442 v.reset(OpARM64MOVDstoreidx8) 11443 v.AddArg(ptr) 11444 v.AddArg(idx) 11445 v.AddArg(val) 11446 v.AddArg(mem) 11447 return true 11448 } 11449 // match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem) 11450 // result: (MOVDstoreidx8 ptr idx val mem) 11451 for { 11452 mem := v.Args[3] 11453 v_0 := v.Args[0] 11454 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 3 { 11455 break 11456 } 11457 idx := v_0.Args[0] 11458 ptr := v.Args[1] 11459 val := v.Args[2] 11460 v.reset(OpARM64MOVDstoreidx8) 11461 v.AddArg(ptr) 11462 v.AddArg(idx) 11463 v.AddArg(val) 11464 v.AddArg(mem) 11465 return true 11466 } 11467 // match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem) 11468 // result: (MOVDstorezeroidx ptr idx mem) 11469 for { 11470 mem := v.Args[3] 11471 ptr := v.Args[0] 11472 idx := v.Args[1] 11473 v_2 := v.Args[2] 11474 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 11475 break 11476 } 11477 v.reset(OpARM64MOVDstorezeroidx) 11478 v.AddArg(ptr) 11479 v.AddArg(idx) 11480 v.AddArg(mem) 11481 return true 11482 } 11483 
return false
}

// rewriteValueARM64_OpARM64MOVDstoreidx8_0 applies the generated ARM64
// rewrite rules for MOVDstoreidx8 (8-byte store with index scaled by 8). It
// mutates v in place and reports whether any rule fired. Machine-generated
// from gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64MOVDstoreidx8_0(v *Value) bool {
	// Constant scaled index folds into the store's immediate offset (c<<3).
	// match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// result: (MOVDstore [c<<3] ptr val mem)
	for {
		mem := v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// Storing the constant 0 becomes a dedicated zero store (frees a register).
	// match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		mem := v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezero_0 applies the generated ARM64
// rewrite rules for MOVDstorezero (8-byte store of zero). Machine-generated
// from gen/ARM64.rules — do not hand-edit.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// Fold an ADDconst base into the store offset when the sum stays 32-bit
	// and the base is not SB under -shared.
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// Merge a MOVDaddr base: combine offsets and symbols when legal.
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2]
{mergeSym(sym1,sym2)} ptr mem) 11551 for { 11552 off1 := v.AuxInt 11553 sym1 := v.Aux 11554 mem := v.Args[1] 11555 v_0 := v.Args[0] 11556 if v_0.Op != OpARM64MOVDaddr { 11557 break 11558 } 11559 off2 := v_0.AuxInt 11560 sym2 := v_0.Aux 11561 ptr := v_0.Args[0] 11562 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11563 break 11564 } 11565 v.reset(OpARM64MOVDstorezero) 11566 v.AuxInt = off1 + off2 11567 v.Aux = mergeSym(sym1, sym2) 11568 v.AddArg(ptr) 11569 v.AddArg(mem) 11570 return true 11571 } 11572 // match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem) 11573 // cond: off == 0 && sym == nil 11574 // result: (MOVDstorezeroidx ptr idx mem) 11575 for { 11576 off := v.AuxInt 11577 sym := v.Aux 11578 mem := v.Args[1] 11579 v_0 := v.Args[0] 11580 if v_0.Op != OpARM64ADD { 11581 break 11582 } 11583 idx := v_0.Args[1] 11584 ptr := v_0.Args[0] 11585 if !(off == 0 && sym == nil) { 11586 break 11587 } 11588 v.reset(OpARM64MOVDstorezeroidx) 11589 v.AddArg(ptr) 11590 v.AddArg(idx) 11591 v.AddArg(mem) 11592 return true 11593 } 11594 // match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem) 11595 // cond: off == 0 && sym == nil 11596 // result: (MOVDstorezeroidx8 ptr idx mem) 11597 for { 11598 off := v.AuxInt 11599 sym := v.Aux 11600 mem := v.Args[1] 11601 v_0 := v.Args[0] 11602 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 3 { 11603 break 11604 } 11605 idx := v_0.Args[1] 11606 ptr := v_0.Args[0] 11607 if !(off == 0 && sym == nil) { 11608 break 11609 } 11610 v.reset(OpARM64MOVDstorezeroidx8) 11611 v.AddArg(ptr) 11612 v.AddArg(idx) 11613 v.AddArg(mem) 11614 return true 11615 } 11616 // match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem)) 11617 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,8) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 11618 // result: (MOVQstorezero [min(i,j)] {s} ptr0 mem) 11619 for { 11620 i := v.AuxInt 11621 s := v.Aux 11622 _ = v.Args[1] 11623 ptr0 := 
v.Args[0] 11624 x := v.Args[1] 11625 if x.Op != OpARM64MOVDstorezero { 11626 break 11627 } 11628 j := x.AuxInt 11629 if x.Aux != s { 11630 break 11631 } 11632 mem := x.Args[1] 11633 ptr1 := x.Args[0] 11634 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 11635 break 11636 } 11637 v.reset(OpARM64MOVQstorezero) 11638 v.AuxInt = min(i, j) 11639 v.Aux = s 11640 v.AddArg(ptr0) 11641 v.AddArg(mem) 11642 return true 11643 } 11644 // match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem)) 11645 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11646 // result: (MOVQstorezero [0] {s} p0 mem) 11647 for { 11648 if v.AuxInt != 8 { 11649 break 11650 } 11651 s := v.Aux 11652 _ = v.Args[1] 11653 p0 := v.Args[0] 11654 if p0.Op != OpARM64ADD { 11655 break 11656 } 11657 idx0 := p0.Args[1] 11658 ptr0 := p0.Args[0] 11659 x := v.Args[1] 11660 if x.Op != OpARM64MOVDstorezeroidx { 11661 break 11662 } 11663 mem := x.Args[2] 11664 ptr1 := x.Args[0] 11665 idx1 := x.Args[1] 11666 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11667 break 11668 } 11669 v.reset(OpARM64MOVQstorezero) 11670 v.AuxInt = 0 11671 v.Aux = s 11672 v.AddArg(p0) 11673 v.AddArg(mem) 11674 return true 11675 } 11676 // match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem)) 11677 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 11678 // result: (MOVQstorezero [0] {s} p0 mem) 11679 for { 11680 if v.AuxInt != 8 { 11681 break 11682 } 11683 s := v.Aux 11684 _ = v.Args[1] 11685 p0 := v.Args[0] 11686 if p0.Op != OpARM64ADDshiftLL || p0.AuxInt != 3 { 11687 break 11688 } 11689 idx0 := p0.Args[1] 11690 ptr0 := p0.Args[0] 11691 x := v.Args[1] 11692 if x.Op != 
OpARM64MOVDstorezeroidx8 { 11693 break 11694 } 11695 mem := x.Args[2] 11696 ptr1 := x.Args[0] 11697 idx1 := x.Args[1] 11698 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 11699 break 11700 } 11701 v.reset(OpARM64MOVQstorezero) 11702 v.AuxInt = 0 11703 v.Aux = s 11704 v.AddArg(p0) 11705 v.AddArg(mem) 11706 return true 11707 } 11708 return false 11709 } 11710 func rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v *Value) bool { 11711 // match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem) 11712 // result: (MOVDstorezero [c] ptr mem) 11713 for { 11714 mem := v.Args[2] 11715 ptr := v.Args[0] 11716 v_1 := v.Args[1] 11717 if v_1.Op != OpARM64MOVDconst { 11718 break 11719 } 11720 c := v_1.AuxInt 11721 v.reset(OpARM64MOVDstorezero) 11722 v.AuxInt = c 11723 v.AddArg(ptr) 11724 v.AddArg(mem) 11725 return true 11726 } 11727 // match: (MOVDstorezeroidx (MOVDconst [c]) idx mem) 11728 // result: (MOVDstorezero [c] idx mem) 11729 for { 11730 mem := v.Args[2] 11731 v_0 := v.Args[0] 11732 if v_0.Op != OpARM64MOVDconst { 11733 break 11734 } 11735 c := v_0.AuxInt 11736 idx := v.Args[1] 11737 v.reset(OpARM64MOVDstorezero) 11738 v.AuxInt = c 11739 v.AddArg(idx) 11740 v.AddArg(mem) 11741 return true 11742 } 11743 // match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem) 11744 // result: (MOVDstorezeroidx8 ptr idx mem) 11745 for { 11746 mem := v.Args[2] 11747 ptr := v.Args[0] 11748 v_1 := v.Args[1] 11749 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 3 { 11750 break 11751 } 11752 idx := v_1.Args[0] 11753 v.reset(OpARM64MOVDstorezeroidx8) 11754 v.AddArg(ptr) 11755 v.AddArg(idx) 11756 v.AddArg(mem) 11757 return true 11758 } 11759 // match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem) 11760 // result: (MOVDstorezeroidx8 ptr idx mem) 11761 for { 11762 mem := v.Args[2] 11763 v_0 := v.Args[0] 11764 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 3 { 11765 break 11766 } 11767 idx := v_0.Args[0] 11768 ptr := v.Args[1] 11769 v.reset(OpARM64MOVDstorezeroidx8) 
11770 v.AddArg(ptr) 11771 v.AddArg(idx) 11772 v.AddArg(mem) 11773 return true 11774 } 11775 return false 11776 } 11777 func rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v *Value) bool { 11778 // match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem) 11779 // result: (MOVDstorezero [c<<3] ptr mem) 11780 for { 11781 mem := v.Args[2] 11782 ptr := v.Args[0] 11783 v_1 := v.Args[1] 11784 if v_1.Op != OpARM64MOVDconst { 11785 break 11786 } 11787 c := v_1.AuxInt 11788 v.reset(OpARM64MOVDstorezero) 11789 v.AuxInt = c << 3 11790 v.AddArg(ptr) 11791 v.AddArg(mem) 11792 return true 11793 } 11794 return false 11795 } 11796 func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool { 11797 b := v.Block 11798 config := b.Func.Config 11799 // match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem) 11800 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11801 // result: (MOVHUload [off1+off2] {sym} ptr mem) 11802 for { 11803 off1 := v.AuxInt 11804 sym := v.Aux 11805 mem := v.Args[1] 11806 v_0 := v.Args[0] 11807 if v_0.Op != OpARM64ADDconst { 11808 break 11809 } 11810 off2 := v_0.AuxInt 11811 ptr := v_0.Args[0] 11812 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11813 break 11814 } 11815 v.reset(OpARM64MOVHUload) 11816 v.AuxInt = off1 + off2 11817 v.Aux = sym 11818 v.AddArg(ptr) 11819 v.AddArg(mem) 11820 return true 11821 } 11822 // match: (MOVHUload [off] {sym} (ADD ptr idx) mem) 11823 // cond: off == 0 && sym == nil 11824 // result: (MOVHUloadidx ptr idx mem) 11825 for { 11826 off := v.AuxInt 11827 sym := v.Aux 11828 mem := v.Args[1] 11829 v_0 := v.Args[0] 11830 if v_0.Op != OpARM64ADD { 11831 break 11832 } 11833 idx := v_0.Args[1] 11834 ptr := v_0.Args[0] 11835 if !(off == 0 && sym == nil) { 11836 break 11837 } 11838 v.reset(OpARM64MOVHUloadidx) 11839 v.AddArg(ptr) 11840 v.AddArg(idx) 11841 v.AddArg(mem) 11842 return true 11843 } 11844 // match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem) 11845 // cond: off == 0 && sym 
== nil 11846 // result: (MOVHUloadidx2 ptr idx mem) 11847 for { 11848 off := v.AuxInt 11849 sym := v.Aux 11850 mem := v.Args[1] 11851 v_0 := v.Args[0] 11852 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 11853 break 11854 } 11855 idx := v_0.Args[1] 11856 ptr := v_0.Args[0] 11857 if !(off == 0 && sym == nil) { 11858 break 11859 } 11860 v.reset(OpARM64MOVHUloadidx2) 11861 v.AddArg(ptr) 11862 v.AddArg(idx) 11863 v.AddArg(mem) 11864 return true 11865 } 11866 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 11867 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 11868 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 11869 for { 11870 off1 := v.AuxInt 11871 sym1 := v.Aux 11872 mem := v.Args[1] 11873 v_0 := v.Args[0] 11874 if v_0.Op != OpARM64MOVDaddr { 11875 break 11876 } 11877 off2 := v_0.AuxInt 11878 sym2 := v_0.Aux 11879 ptr := v_0.Args[0] 11880 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 11881 break 11882 } 11883 v.reset(OpARM64MOVHUload) 11884 v.AuxInt = off1 + off2 11885 v.Aux = mergeSym(sym1, sym2) 11886 v.AddArg(ptr) 11887 v.AddArg(mem) 11888 return true 11889 } 11890 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 11891 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 11892 // result: (MOVDconst [0]) 11893 for { 11894 off := v.AuxInt 11895 sym := v.Aux 11896 _ = v.Args[1] 11897 ptr := v.Args[0] 11898 v_1 := v.Args[1] 11899 if v_1.Op != OpARM64MOVHstorezero { 11900 break 11901 } 11902 off2 := v_1.AuxInt 11903 sym2 := v_1.Aux 11904 _ = v_1.Args[1] 11905 ptr2 := v_1.Args[0] 11906 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 11907 break 11908 } 11909 v.reset(OpARM64MOVDconst) 11910 v.AuxInt = 0 11911 return true 11912 } 11913 // match: (MOVHUload [off] {sym} (SB) _) 11914 // cond: symIsRO(sym) 11915 // result: (MOVDconst [int64(read16(sym, off, config.BigEndian))]) 
11916 for { 11917 off := v.AuxInt 11918 sym := v.Aux 11919 _ = v.Args[1] 11920 v_0 := v.Args[0] 11921 if v_0.Op != OpSB || !(symIsRO(sym)) { 11922 break 11923 } 11924 v.reset(OpARM64MOVDconst) 11925 v.AuxInt = int64(read16(sym, off, config.BigEndian)) 11926 return true 11927 } 11928 return false 11929 } 11930 func rewriteValueARM64_OpARM64MOVHUloadidx_0(v *Value) bool { 11931 // match: (MOVHUloadidx ptr (MOVDconst [c]) mem) 11932 // result: (MOVHUload [c] ptr mem) 11933 for { 11934 mem := v.Args[2] 11935 ptr := v.Args[0] 11936 v_1 := v.Args[1] 11937 if v_1.Op != OpARM64MOVDconst { 11938 break 11939 } 11940 c := v_1.AuxInt 11941 v.reset(OpARM64MOVHUload) 11942 v.AuxInt = c 11943 v.AddArg(ptr) 11944 v.AddArg(mem) 11945 return true 11946 } 11947 // match: (MOVHUloadidx (MOVDconst [c]) ptr mem) 11948 // result: (MOVHUload [c] ptr mem) 11949 for { 11950 mem := v.Args[2] 11951 v_0 := v.Args[0] 11952 if v_0.Op != OpARM64MOVDconst { 11953 break 11954 } 11955 c := v_0.AuxInt 11956 ptr := v.Args[1] 11957 v.reset(OpARM64MOVHUload) 11958 v.AuxInt = c 11959 v.AddArg(ptr) 11960 v.AddArg(mem) 11961 return true 11962 } 11963 // match: (MOVHUloadidx ptr (SLLconst [1] idx) mem) 11964 // result: (MOVHUloadidx2 ptr idx mem) 11965 for { 11966 mem := v.Args[2] 11967 ptr := v.Args[0] 11968 v_1 := v.Args[1] 11969 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 1 { 11970 break 11971 } 11972 idx := v_1.Args[0] 11973 v.reset(OpARM64MOVHUloadidx2) 11974 v.AddArg(ptr) 11975 v.AddArg(idx) 11976 v.AddArg(mem) 11977 return true 11978 } 11979 // match: (MOVHUloadidx ptr (ADD idx idx) mem) 11980 // result: (MOVHUloadidx2 ptr idx mem) 11981 for { 11982 mem := v.Args[2] 11983 ptr := v.Args[0] 11984 v_1 := v.Args[1] 11985 if v_1.Op != OpARM64ADD { 11986 break 11987 } 11988 idx := v_1.Args[1] 11989 if idx != v_1.Args[0] { 11990 break 11991 } 11992 v.reset(OpARM64MOVHUloadidx2) 11993 v.AddArg(ptr) 11994 v.AddArg(idx) 11995 v.AddArg(mem) 11996 return true 11997 } 11998 // match: (MOVHUloadidx (ADD idx idx) 
ptr mem) 11999 // result: (MOVHUloadidx2 ptr idx mem) 12000 for { 12001 mem := v.Args[2] 12002 v_0 := v.Args[0] 12003 if v_0.Op != OpARM64ADD { 12004 break 12005 } 12006 idx := v_0.Args[1] 12007 if idx != v_0.Args[0] { 12008 break 12009 } 12010 ptr := v.Args[1] 12011 v.reset(OpARM64MOVHUloadidx2) 12012 v.AddArg(ptr) 12013 v.AddArg(idx) 12014 v.AddArg(mem) 12015 return true 12016 } 12017 // match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _)) 12018 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 12019 // result: (MOVDconst [0]) 12020 for { 12021 _ = v.Args[2] 12022 ptr := v.Args[0] 12023 idx := v.Args[1] 12024 v_2 := v.Args[2] 12025 if v_2.Op != OpARM64MOVHstorezeroidx { 12026 break 12027 } 12028 _ = v_2.Args[2] 12029 ptr2 := v_2.Args[0] 12030 idx2 := v_2.Args[1] 12031 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 12032 break 12033 } 12034 v.reset(OpARM64MOVDconst) 12035 v.AuxInt = 0 12036 return true 12037 } 12038 return false 12039 } 12040 func rewriteValueARM64_OpARM64MOVHUloadidx2_0(v *Value) bool { 12041 // match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem) 12042 // result: (MOVHUload [c<<1] ptr mem) 12043 for { 12044 mem := v.Args[2] 12045 ptr := v.Args[0] 12046 v_1 := v.Args[1] 12047 if v_1.Op != OpARM64MOVDconst { 12048 break 12049 } 12050 c := v_1.AuxInt 12051 v.reset(OpARM64MOVHUload) 12052 v.AuxInt = c << 1 12053 v.AddArg(ptr) 12054 v.AddArg(mem) 12055 return true 12056 } 12057 // match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _)) 12058 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 12059 // result: (MOVDconst [0]) 12060 for { 12061 _ = v.Args[2] 12062 ptr := v.Args[0] 12063 idx := v.Args[1] 12064 v_2 := v.Args[2] 12065 if v_2.Op != OpARM64MOVHstorezeroidx2 { 12066 break 12067 } 12068 _ = v_2.Args[2] 12069 ptr2 := v_2.Args[0] 12070 idx2 := v_2.Args[1] 12071 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 12072 break 
12073 } 12074 v.reset(OpARM64MOVDconst) 12075 v.AuxInt = 0 12076 return true 12077 } 12078 return false 12079 } 12080 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool { 12081 // match: (MOVHUreg x:(MOVBUload _ _)) 12082 // result: (MOVDreg x) 12083 for { 12084 x := v.Args[0] 12085 if x.Op != OpARM64MOVBUload { 12086 break 12087 } 12088 _ = x.Args[1] 12089 v.reset(OpARM64MOVDreg) 12090 v.AddArg(x) 12091 return true 12092 } 12093 // match: (MOVHUreg x:(MOVHUload _ _)) 12094 // result: (MOVDreg x) 12095 for { 12096 x := v.Args[0] 12097 if x.Op != OpARM64MOVHUload { 12098 break 12099 } 12100 _ = x.Args[1] 12101 v.reset(OpARM64MOVDreg) 12102 v.AddArg(x) 12103 return true 12104 } 12105 // match: (MOVHUreg x:(MOVBUloadidx _ _ _)) 12106 // result: (MOVDreg x) 12107 for { 12108 x := v.Args[0] 12109 if x.Op != OpARM64MOVBUloadidx { 12110 break 12111 } 12112 _ = x.Args[2] 12113 v.reset(OpARM64MOVDreg) 12114 v.AddArg(x) 12115 return true 12116 } 12117 // match: (MOVHUreg x:(MOVHUloadidx _ _ _)) 12118 // result: (MOVDreg x) 12119 for { 12120 x := v.Args[0] 12121 if x.Op != OpARM64MOVHUloadidx { 12122 break 12123 } 12124 _ = x.Args[2] 12125 v.reset(OpARM64MOVDreg) 12126 v.AddArg(x) 12127 return true 12128 } 12129 // match: (MOVHUreg x:(MOVHUloadidx2 _ _ _)) 12130 // result: (MOVDreg x) 12131 for { 12132 x := v.Args[0] 12133 if x.Op != OpARM64MOVHUloadidx2 { 12134 break 12135 } 12136 _ = x.Args[2] 12137 v.reset(OpARM64MOVDreg) 12138 v.AddArg(x) 12139 return true 12140 } 12141 // match: (MOVHUreg x:(MOVBUreg _)) 12142 // result: (MOVDreg x) 12143 for { 12144 x := v.Args[0] 12145 if x.Op != OpARM64MOVBUreg { 12146 break 12147 } 12148 v.reset(OpARM64MOVDreg) 12149 v.AddArg(x) 12150 return true 12151 } 12152 // match: (MOVHUreg x:(MOVHUreg _)) 12153 // result: (MOVDreg x) 12154 for { 12155 x := v.Args[0] 12156 if x.Op != OpARM64MOVHUreg { 12157 break 12158 } 12159 v.reset(OpARM64MOVDreg) 12160 v.AddArg(x) 12161 return true 12162 } 12163 // match: (MOVHUreg (ANDconst [c] x)) 
12164 // result: (ANDconst [c&(1<<16-1)] x) 12165 for { 12166 v_0 := v.Args[0] 12167 if v_0.Op != OpARM64ANDconst { 12168 break 12169 } 12170 c := v_0.AuxInt 12171 x := v_0.Args[0] 12172 v.reset(OpARM64ANDconst) 12173 v.AuxInt = c & (1<<16 - 1) 12174 v.AddArg(x) 12175 return true 12176 } 12177 // match: (MOVHUreg (MOVDconst [c])) 12178 // result: (MOVDconst [int64(uint16(c))]) 12179 for { 12180 v_0 := v.Args[0] 12181 if v_0.Op != OpARM64MOVDconst { 12182 break 12183 } 12184 c := v_0.AuxInt 12185 v.reset(OpARM64MOVDconst) 12186 v.AuxInt = int64(uint16(c)) 12187 return true 12188 } 12189 // match: (MOVHUreg (SLLconst [sc] x)) 12190 // cond: isARM64BFMask(sc, 1<<16-1, sc) 12191 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x) 12192 for { 12193 v_0 := v.Args[0] 12194 if v_0.Op != OpARM64SLLconst { 12195 break 12196 } 12197 sc := v_0.AuxInt 12198 x := v_0.Args[0] 12199 if !(isARM64BFMask(sc, 1<<16-1, sc)) { 12200 break 12201 } 12202 v.reset(OpARM64UBFIZ) 12203 v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc)) 12204 v.AddArg(x) 12205 return true 12206 } 12207 return false 12208 } 12209 func rewriteValueARM64_OpARM64MOVHUreg_10(v *Value) bool { 12210 // match: (MOVHUreg (SRLconst [sc] x)) 12211 // cond: isARM64BFMask(sc, 1<<16-1, 0) 12212 // result: (UBFX [armBFAuxInt(sc, 16)] x) 12213 for { 12214 v_0 := v.Args[0] 12215 if v_0.Op != OpARM64SRLconst { 12216 break 12217 } 12218 sc := v_0.AuxInt 12219 x := v_0.Args[0] 12220 if !(isARM64BFMask(sc, 1<<16-1, 0)) { 12221 break 12222 } 12223 v.reset(OpARM64UBFX) 12224 v.AuxInt = armBFAuxInt(sc, 16) 12225 v.AddArg(x) 12226 return true 12227 } 12228 return false 12229 } 12230 func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool { 12231 b := v.Block 12232 config := b.Func.Config 12233 // match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem) 12234 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12235 // result: (MOVHload [off1+off2] {sym} ptr mem) 12236 for { 12237 off1 := 
v.AuxInt 12238 sym := v.Aux 12239 mem := v.Args[1] 12240 v_0 := v.Args[0] 12241 if v_0.Op != OpARM64ADDconst { 12242 break 12243 } 12244 off2 := v_0.AuxInt 12245 ptr := v_0.Args[0] 12246 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12247 break 12248 } 12249 v.reset(OpARM64MOVHload) 12250 v.AuxInt = off1 + off2 12251 v.Aux = sym 12252 v.AddArg(ptr) 12253 v.AddArg(mem) 12254 return true 12255 } 12256 // match: (MOVHload [off] {sym} (ADD ptr idx) mem) 12257 // cond: off == 0 && sym == nil 12258 // result: (MOVHloadidx ptr idx mem) 12259 for { 12260 off := v.AuxInt 12261 sym := v.Aux 12262 mem := v.Args[1] 12263 v_0 := v.Args[0] 12264 if v_0.Op != OpARM64ADD { 12265 break 12266 } 12267 idx := v_0.Args[1] 12268 ptr := v_0.Args[0] 12269 if !(off == 0 && sym == nil) { 12270 break 12271 } 12272 v.reset(OpARM64MOVHloadidx) 12273 v.AddArg(ptr) 12274 v.AddArg(idx) 12275 v.AddArg(mem) 12276 return true 12277 } 12278 // match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem) 12279 // cond: off == 0 && sym == nil 12280 // result: (MOVHloadidx2 ptr idx mem) 12281 for { 12282 off := v.AuxInt 12283 sym := v.Aux 12284 mem := v.Args[1] 12285 v_0 := v.Args[0] 12286 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 12287 break 12288 } 12289 idx := v_0.Args[1] 12290 ptr := v_0.Args[0] 12291 if !(off == 0 && sym == nil) { 12292 break 12293 } 12294 v.reset(OpARM64MOVHloadidx2) 12295 v.AddArg(ptr) 12296 v.AddArg(idx) 12297 v.AddArg(mem) 12298 return true 12299 } 12300 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12301 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12302 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12303 for { 12304 off1 := v.AuxInt 12305 sym1 := v.Aux 12306 mem := v.Args[1] 12307 v_0 := v.Args[0] 12308 if v_0.Op != OpARM64MOVDaddr { 12309 break 12310 } 12311 off2 := v_0.AuxInt 12312 sym2 := v_0.Aux 12313 ptr := v_0.Args[0] 12314 if 
!(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12315 break 12316 } 12317 v.reset(OpARM64MOVHload) 12318 v.AuxInt = off1 + off2 12319 v.Aux = mergeSym(sym1, sym2) 12320 v.AddArg(ptr) 12321 v.AddArg(mem) 12322 return true 12323 } 12324 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 12325 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 12326 // result: (MOVDconst [0]) 12327 for { 12328 off := v.AuxInt 12329 sym := v.Aux 12330 _ = v.Args[1] 12331 ptr := v.Args[0] 12332 v_1 := v.Args[1] 12333 if v_1.Op != OpARM64MOVHstorezero { 12334 break 12335 } 12336 off2 := v_1.AuxInt 12337 sym2 := v_1.Aux 12338 _ = v_1.Args[1] 12339 ptr2 := v_1.Args[0] 12340 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 12341 break 12342 } 12343 v.reset(OpARM64MOVDconst) 12344 v.AuxInt = 0 12345 return true 12346 } 12347 return false 12348 } 12349 func rewriteValueARM64_OpARM64MOVHloadidx_0(v *Value) bool { 12350 // match: (MOVHloadidx ptr (MOVDconst [c]) mem) 12351 // result: (MOVHload [c] ptr mem) 12352 for { 12353 mem := v.Args[2] 12354 ptr := v.Args[0] 12355 v_1 := v.Args[1] 12356 if v_1.Op != OpARM64MOVDconst { 12357 break 12358 } 12359 c := v_1.AuxInt 12360 v.reset(OpARM64MOVHload) 12361 v.AuxInt = c 12362 v.AddArg(ptr) 12363 v.AddArg(mem) 12364 return true 12365 } 12366 // match: (MOVHloadidx (MOVDconst [c]) ptr mem) 12367 // result: (MOVHload [c] ptr mem) 12368 for { 12369 mem := v.Args[2] 12370 v_0 := v.Args[0] 12371 if v_0.Op != OpARM64MOVDconst { 12372 break 12373 } 12374 c := v_0.AuxInt 12375 ptr := v.Args[1] 12376 v.reset(OpARM64MOVHload) 12377 v.AuxInt = c 12378 v.AddArg(ptr) 12379 v.AddArg(mem) 12380 return true 12381 } 12382 // match: (MOVHloadidx ptr (SLLconst [1] idx) mem) 12383 // result: (MOVHloadidx2 ptr idx mem) 12384 for { 12385 mem := v.Args[2] 12386 ptr := v.Args[0] 12387 v_1 := v.Args[1] 12388 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 1 { 12389 break 12390 } 12391 
idx := v_1.Args[0] 12392 v.reset(OpARM64MOVHloadidx2) 12393 v.AddArg(ptr) 12394 v.AddArg(idx) 12395 v.AddArg(mem) 12396 return true 12397 } 12398 // match: (MOVHloadidx ptr (ADD idx idx) mem) 12399 // result: (MOVHloadidx2 ptr idx mem) 12400 for { 12401 mem := v.Args[2] 12402 ptr := v.Args[0] 12403 v_1 := v.Args[1] 12404 if v_1.Op != OpARM64ADD { 12405 break 12406 } 12407 idx := v_1.Args[1] 12408 if idx != v_1.Args[0] { 12409 break 12410 } 12411 v.reset(OpARM64MOVHloadidx2) 12412 v.AddArg(ptr) 12413 v.AddArg(idx) 12414 v.AddArg(mem) 12415 return true 12416 } 12417 // match: (MOVHloadidx (ADD idx idx) ptr mem) 12418 // result: (MOVHloadidx2 ptr idx mem) 12419 for { 12420 mem := v.Args[2] 12421 v_0 := v.Args[0] 12422 if v_0.Op != OpARM64ADD { 12423 break 12424 } 12425 idx := v_0.Args[1] 12426 if idx != v_0.Args[0] { 12427 break 12428 } 12429 ptr := v.Args[1] 12430 v.reset(OpARM64MOVHloadidx2) 12431 v.AddArg(ptr) 12432 v.AddArg(idx) 12433 v.AddArg(mem) 12434 return true 12435 } 12436 // match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _)) 12437 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 12438 // result: (MOVDconst [0]) 12439 for { 12440 _ = v.Args[2] 12441 ptr := v.Args[0] 12442 idx := v.Args[1] 12443 v_2 := v.Args[2] 12444 if v_2.Op != OpARM64MOVHstorezeroidx { 12445 break 12446 } 12447 _ = v_2.Args[2] 12448 ptr2 := v_2.Args[0] 12449 idx2 := v_2.Args[1] 12450 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 12451 break 12452 } 12453 v.reset(OpARM64MOVDconst) 12454 v.AuxInt = 0 12455 return true 12456 } 12457 return false 12458 } 12459 func rewriteValueARM64_OpARM64MOVHloadidx2_0(v *Value) bool { 12460 // match: (MOVHloadidx2 ptr (MOVDconst [c]) mem) 12461 // result: (MOVHload [c<<1] ptr mem) 12462 for { 12463 mem := v.Args[2] 12464 ptr := v.Args[0] 12465 v_1 := v.Args[1] 12466 if v_1.Op != OpARM64MOVDconst { 12467 break 12468 } 12469 c := v_1.AuxInt 
12470 v.reset(OpARM64MOVHload) 12471 v.AuxInt = c << 1 12472 v.AddArg(ptr) 12473 v.AddArg(mem) 12474 return true 12475 } 12476 // match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _)) 12477 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 12478 // result: (MOVDconst [0]) 12479 for { 12480 _ = v.Args[2] 12481 ptr := v.Args[0] 12482 idx := v.Args[1] 12483 v_2 := v.Args[2] 12484 if v_2.Op != OpARM64MOVHstorezeroidx2 { 12485 break 12486 } 12487 _ = v_2.Args[2] 12488 ptr2 := v_2.Args[0] 12489 idx2 := v_2.Args[1] 12490 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 12491 break 12492 } 12493 v.reset(OpARM64MOVDconst) 12494 v.AuxInt = 0 12495 return true 12496 } 12497 return false 12498 } 12499 func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool { 12500 // match: (MOVHreg x:(MOVBload _ _)) 12501 // result: (MOVDreg x) 12502 for { 12503 x := v.Args[0] 12504 if x.Op != OpARM64MOVBload { 12505 break 12506 } 12507 _ = x.Args[1] 12508 v.reset(OpARM64MOVDreg) 12509 v.AddArg(x) 12510 return true 12511 } 12512 // match: (MOVHreg x:(MOVBUload _ _)) 12513 // result: (MOVDreg x) 12514 for { 12515 x := v.Args[0] 12516 if x.Op != OpARM64MOVBUload { 12517 break 12518 } 12519 _ = x.Args[1] 12520 v.reset(OpARM64MOVDreg) 12521 v.AddArg(x) 12522 return true 12523 } 12524 // match: (MOVHreg x:(MOVHload _ _)) 12525 // result: (MOVDreg x) 12526 for { 12527 x := v.Args[0] 12528 if x.Op != OpARM64MOVHload { 12529 break 12530 } 12531 _ = x.Args[1] 12532 v.reset(OpARM64MOVDreg) 12533 v.AddArg(x) 12534 return true 12535 } 12536 // match: (MOVHreg x:(MOVBloadidx _ _ _)) 12537 // result: (MOVDreg x) 12538 for { 12539 x := v.Args[0] 12540 if x.Op != OpARM64MOVBloadidx { 12541 break 12542 } 12543 _ = x.Args[2] 12544 v.reset(OpARM64MOVDreg) 12545 v.AddArg(x) 12546 return true 12547 } 12548 // match: (MOVHreg x:(MOVBUloadidx _ _ _)) 12549 // result: (MOVDreg x) 12550 for { 12551 x := v.Args[0] 12552 if x.Op != OpARM64MOVBUloadidx { 12553 break 12554 } 12555 _ = x.Args[2] 12556 
v.reset(OpARM64MOVDreg) 12557 v.AddArg(x) 12558 return true 12559 } 12560 // match: (MOVHreg x:(MOVHloadidx _ _ _)) 12561 // result: (MOVDreg x) 12562 for { 12563 x := v.Args[0] 12564 if x.Op != OpARM64MOVHloadidx { 12565 break 12566 } 12567 _ = x.Args[2] 12568 v.reset(OpARM64MOVDreg) 12569 v.AddArg(x) 12570 return true 12571 } 12572 // match: (MOVHreg x:(MOVHloadidx2 _ _ _)) 12573 // result: (MOVDreg x) 12574 for { 12575 x := v.Args[0] 12576 if x.Op != OpARM64MOVHloadidx2 { 12577 break 12578 } 12579 _ = x.Args[2] 12580 v.reset(OpARM64MOVDreg) 12581 v.AddArg(x) 12582 return true 12583 } 12584 // match: (MOVHreg x:(MOVBreg _)) 12585 // result: (MOVDreg x) 12586 for { 12587 x := v.Args[0] 12588 if x.Op != OpARM64MOVBreg { 12589 break 12590 } 12591 v.reset(OpARM64MOVDreg) 12592 v.AddArg(x) 12593 return true 12594 } 12595 // match: (MOVHreg x:(MOVBUreg _)) 12596 // result: (MOVDreg x) 12597 for { 12598 x := v.Args[0] 12599 if x.Op != OpARM64MOVBUreg { 12600 break 12601 } 12602 v.reset(OpARM64MOVDreg) 12603 v.AddArg(x) 12604 return true 12605 } 12606 // match: (MOVHreg x:(MOVHreg _)) 12607 // result: (MOVDreg x) 12608 for { 12609 x := v.Args[0] 12610 if x.Op != OpARM64MOVHreg { 12611 break 12612 } 12613 v.reset(OpARM64MOVDreg) 12614 v.AddArg(x) 12615 return true 12616 } 12617 return false 12618 } 12619 func rewriteValueARM64_OpARM64MOVHreg_10(v *Value) bool { 12620 // match: (MOVHreg (MOVDconst [c])) 12621 // result: (MOVDconst [int64(int16(c))]) 12622 for { 12623 v_0 := v.Args[0] 12624 if v_0.Op != OpARM64MOVDconst { 12625 break 12626 } 12627 c := v_0.AuxInt 12628 v.reset(OpARM64MOVDconst) 12629 v.AuxInt = int64(int16(c)) 12630 return true 12631 } 12632 // match: (MOVHreg (SLLconst [lc] x)) 12633 // cond: lc < 16 12634 // result: (SBFIZ [armBFAuxInt(lc, 16-lc)] x) 12635 for { 12636 v_0 := v.Args[0] 12637 if v_0.Op != OpARM64SLLconst { 12638 break 12639 } 12640 lc := v_0.AuxInt 12641 x := v_0.Args[0] 12642 if !(lc < 16) { 12643 break 12644 } 12645 v.reset(OpARM64SBFIZ) 
12646 v.AuxInt = armBFAuxInt(lc, 16-lc) 12647 v.AddArg(x) 12648 return true 12649 } 12650 return false 12651 } 12652 func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool { 12653 b := v.Block 12654 config := b.Func.Config 12655 // match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem) 12656 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12657 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 12658 for { 12659 off1 := v.AuxInt 12660 sym := v.Aux 12661 mem := v.Args[2] 12662 v_0 := v.Args[0] 12663 if v_0.Op != OpARM64ADDconst { 12664 break 12665 } 12666 off2 := v_0.AuxInt 12667 ptr := v_0.Args[0] 12668 val := v.Args[1] 12669 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12670 break 12671 } 12672 v.reset(OpARM64MOVHstore) 12673 v.AuxInt = off1 + off2 12674 v.Aux = sym 12675 v.AddArg(ptr) 12676 v.AddArg(val) 12677 v.AddArg(mem) 12678 return true 12679 } 12680 // match: (MOVHstore [off] {sym} (ADD ptr idx) val mem) 12681 // cond: off == 0 && sym == nil 12682 // result: (MOVHstoreidx ptr idx val mem) 12683 for { 12684 off := v.AuxInt 12685 sym := v.Aux 12686 mem := v.Args[2] 12687 v_0 := v.Args[0] 12688 if v_0.Op != OpARM64ADD { 12689 break 12690 } 12691 idx := v_0.Args[1] 12692 ptr := v_0.Args[0] 12693 val := v.Args[1] 12694 if !(off == 0 && sym == nil) { 12695 break 12696 } 12697 v.reset(OpARM64MOVHstoreidx) 12698 v.AddArg(ptr) 12699 v.AddArg(idx) 12700 v.AddArg(val) 12701 v.AddArg(mem) 12702 return true 12703 } 12704 // match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem) 12705 // cond: off == 0 && sym == nil 12706 // result: (MOVHstoreidx2 ptr idx val mem) 12707 for { 12708 off := v.AuxInt 12709 sym := v.Aux 12710 mem := v.Args[2] 12711 v_0 := v.Args[0] 12712 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 12713 break 12714 } 12715 idx := v_0.Args[1] 12716 ptr := v_0.Args[0] 12717 val := v.Args[1] 12718 if !(off == 0 && sym == nil) { 12719 break 12720 } 12721 
v.reset(OpARM64MOVHstoreidx2) 12722 v.AddArg(ptr) 12723 v.AddArg(idx) 12724 v.AddArg(val) 12725 v.AddArg(mem) 12726 return true 12727 } 12728 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 12729 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12730 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 12731 for { 12732 off1 := v.AuxInt 12733 sym1 := v.Aux 12734 mem := v.Args[2] 12735 v_0 := v.Args[0] 12736 if v_0.Op != OpARM64MOVDaddr { 12737 break 12738 } 12739 off2 := v_0.AuxInt 12740 sym2 := v_0.Aux 12741 ptr := v_0.Args[0] 12742 val := v.Args[1] 12743 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12744 break 12745 } 12746 v.reset(OpARM64MOVHstore) 12747 v.AuxInt = off1 + off2 12748 v.Aux = mergeSym(sym1, sym2) 12749 v.AddArg(ptr) 12750 v.AddArg(val) 12751 v.AddArg(mem) 12752 return true 12753 } 12754 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 12755 // result: (MOVHstorezero [off] {sym} ptr mem) 12756 for { 12757 off := v.AuxInt 12758 sym := v.Aux 12759 mem := v.Args[2] 12760 ptr := v.Args[0] 12761 v_1 := v.Args[1] 12762 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 12763 break 12764 } 12765 v.reset(OpARM64MOVHstorezero) 12766 v.AuxInt = off 12767 v.Aux = sym 12768 v.AddArg(ptr) 12769 v.AddArg(mem) 12770 return true 12771 } 12772 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 12773 // result: (MOVHstore [off] {sym} ptr x mem) 12774 for { 12775 off := v.AuxInt 12776 sym := v.Aux 12777 mem := v.Args[2] 12778 ptr := v.Args[0] 12779 v_1 := v.Args[1] 12780 if v_1.Op != OpARM64MOVHreg { 12781 break 12782 } 12783 x := v_1.Args[0] 12784 v.reset(OpARM64MOVHstore) 12785 v.AuxInt = off 12786 v.Aux = sym 12787 v.AddArg(ptr) 12788 v.AddArg(x) 12789 v.AddArg(mem) 12790 return true 12791 } 12792 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 12793 // result: (MOVHstore [off] {sym} ptr x mem) 
12794 for { 12795 off := v.AuxInt 12796 sym := v.Aux 12797 mem := v.Args[2] 12798 ptr := v.Args[0] 12799 v_1 := v.Args[1] 12800 if v_1.Op != OpARM64MOVHUreg { 12801 break 12802 } 12803 x := v_1.Args[0] 12804 v.reset(OpARM64MOVHstore) 12805 v.AuxInt = off 12806 v.Aux = sym 12807 v.AddArg(ptr) 12808 v.AddArg(x) 12809 v.AddArg(mem) 12810 return true 12811 } 12812 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 12813 // result: (MOVHstore [off] {sym} ptr x mem) 12814 for { 12815 off := v.AuxInt 12816 sym := v.Aux 12817 mem := v.Args[2] 12818 ptr := v.Args[0] 12819 v_1 := v.Args[1] 12820 if v_1.Op != OpARM64MOVWreg { 12821 break 12822 } 12823 x := v_1.Args[0] 12824 v.reset(OpARM64MOVHstore) 12825 v.AuxInt = off 12826 v.Aux = sym 12827 v.AddArg(ptr) 12828 v.AddArg(x) 12829 v.AddArg(mem) 12830 return true 12831 } 12832 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 12833 // result: (MOVHstore [off] {sym} ptr x mem) 12834 for { 12835 off := v.AuxInt 12836 sym := v.Aux 12837 mem := v.Args[2] 12838 ptr := v.Args[0] 12839 v_1 := v.Args[1] 12840 if v_1.Op != OpARM64MOVWUreg { 12841 break 12842 } 12843 x := v_1.Args[0] 12844 v.reset(OpARM64MOVHstore) 12845 v.AuxInt = off 12846 v.Aux = sym 12847 v.AddArg(ptr) 12848 v.AddArg(x) 12849 v.AddArg(mem) 12850 return true 12851 } 12852 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 12853 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 12854 // result: (MOVWstore [i-2] {s} ptr0 w mem) 12855 for { 12856 i := v.AuxInt 12857 s := v.Aux 12858 _ = v.Args[2] 12859 ptr0 := v.Args[0] 12860 v_1 := v.Args[1] 12861 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 16 { 12862 break 12863 } 12864 w := v_1.Args[0] 12865 x := v.Args[2] 12866 if x.Op != OpARM64MOVHstore || x.AuxInt != i-2 || x.Aux != s { 12867 break 12868 } 12869 mem := x.Args[2] 12870 ptr1 := x.Args[0] 12871 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 12872 break 12873 } 12874 
v.reset(OpARM64MOVWstore) 12875 v.AuxInt = i - 2 12876 v.Aux = s 12877 v.AddArg(ptr0) 12878 v.AddArg(w) 12879 v.AddArg(mem) 12880 return true 12881 } 12882 return false 12883 } 12884 func rewriteValueARM64_OpARM64MOVHstore_10(v *Value) bool { 12885 b := v.Block 12886 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 12887 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 12888 // result: (MOVWstoreidx ptr1 idx1 w mem) 12889 for { 12890 if v.AuxInt != 2 { 12891 break 12892 } 12893 s := v.Aux 12894 _ = v.Args[2] 12895 v_0 := v.Args[0] 12896 if v_0.Op != OpARM64ADD { 12897 break 12898 } 12899 idx0 := v_0.Args[1] 12900 ptr0 := v_0.Args[0] 12901 v_1 := v.Args[1] 12902 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 16 { 12903 break 12904 } 12905 w := v_1.Args[0] 12906 x := v.Args[2] 12907 if x.Op != OpARM64MOVHstoreidx { 12908 break 12909 } 12910 mem := x.Args[3] 12911 ptr1 := x.Args[0] 12912 idx1 := x.Args[1] 12913 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 12914 break 12915 } 12916 v.reset(OpARM64MOVWstoreidx) 12917 v.AddArg(ptr1) 12918 v.AddArg(idx1) 12919 v.AddArg(w) 12920 v.AddArg(mem) 12921 return true 12922 } 12923 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 12924 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 12925 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 12926 for { 12927 if v.AuxInt != 2 { 12928 break 12929 } 12930 s := v.Aux 12931 _ = v.Args[2] 12932 v_0 := v.Args[0] 12933 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 12934 break 12935 } 12936 idx0 := v_0.Args[1] 12937 ptr0 := v_0.Args[0] 12938 v_1 := v.Args[1] 12939 if v_1.Op != OpARM64SRLconst || 
v_1.AuxInt != 16 { 12940 break 12941 } 12942 w := v_1.Args[0] 12943 x := v.Args[2] 12944 if x.Op != OpARM64MOVHstoreidx2 { 12945 break 12946 } 12947 mem := x.Args[3] 12948 ptr1 := x.Args[0] 12949 idx1 := x.Args[1] 12950 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 12951 break 12952 } 12953 v.reset(OpARM64MOVWstoreidx) 12954 v.AddArg(ptr1) 12955 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 12956 v0.AuxInt = 1 12957 v0.AddArg(idx1) 12958 v.AddArg(v0) 12959 v.AddArg(w) 12960 v.AddArg(mem) 12961 return true 12962 } 12963 // match: (MOVHstore [i] {s} ptr0 (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 12964 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 12965 // result: (MOVWstore [i-2] {s} ptr0 w mem) 12966 for { 12967 i := v.AuxInt 12968 s := v.Aux 12969 _ = v.Args[2] 12970 ptr0 := v.Args[0] 12971 v_1 := v.Args[1] 12972 if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(16, 16) { 12973 break 12974 } 12975 w := v_1.Args[0] 12976 x := v.Args[2] 12977 if x.Op != OpARM64MOVHstore || x.AuxInt != i-2 || x.Aux != s { 12978 break 12979 } 12980 mem := x.Args[2] 12981 ptr1 := x.Args[0] 12982 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 12983 break 12984 } 12985 v.reset(OpARM64MOVWstore) 12986 v.AuxInt = i - 2 12987 v.Aux = s 12988 v.AddArg(ptr0) 12989 v.AddArg(w) 12990 v.AddArg(mem) 12991 return true 12992 } 12993 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 12994 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 12995 // result: (MOVWstoreidx ptr1 idx1 w mem) 12996 for { 12997 if v.AuxInt != 2 { 12998 break 12999 } 13000 s := v.Aux 13001 _ = v.Args[2] 13002 v_0 := v.Args[0] 13003 if v_0.Op != OpARM64ADD { 13004 break 13005 } 13006 idx0 := v_0.Args[1] 13007 ptr0 := 
v_0.Args[0] 13008 v_1 := v.Args[1] 13009 if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(16, 16) { 13010 break 13011 } 13012 w := v_1.Args[0] 13013 x := v.Args[2] 13014 if x.Op != OpARM64MOVHstoreidx { 13015 break 13016 } 13017 mem := x.Args[3] 13018 ptr1 := x.Args[0] 13019 idx1 := x.Args[1] 13020 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 13021 break 13022 } 13023 v.reset(OpARM64MOVWstoreidx) 13024 v.AddArg(ptr1) 13025 v.AddArg(idx1) 13026 v.AddArg(w) 13027 v.AddArg(mem) 13028 return true 13029 } 13030 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (UBFX [armBFAuxInt(16, 16)] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 13031 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 13032 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 13033 for { 13034 if v.AuxInt != 2 { 13035 break 13036 } 13037 s := v.Aux 13038 _ = v.Args[2] 13039 v_0 := v.Args[0] 13040 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 13041 break 13042 } 13043 idx0 := v_0.Args[1] 13044 ptr0 := v_0.Args[0] 13045 v_1 := v.Args[1] 13046 if v_1.Op != OpARM64UBFX || v_1.AuxInt != armBFAuxInt(16, 16) { 13047 break 13048 } 13049 w := v_1.Args[0] 13050 x := v.Args[2] 13051 if x.Op != OpARM64MOVHstoreidx2 { 13052 break 13053 } 13054 mem := x.Args[3] 13055 ptr1 := x.Args[0] 13056 idx1 := x.Args[1] 13057 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 13058 break 13059 } 13060 v.reset(OpARM64MOVWstoreidx) 13061 v.AddArg(ptr1) 13062 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 13063 v0.AuxInt = 1 13064 v0.AddArg(idx1) 13065 v.AddArg(v0) 13066 v.AddArg(w) 13067 v.AddArg(mem) 13068 return true 13069 } 13070 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] (MOVDreg w)) x:(MOVHstore [i-2] {s} ptr1 w mem)) 13071 // cond: x.Uses == 1 && 
isSamePtr(ptr0, ptr1) && clobber(x) 13072 // result: (MOVWstore [i-2] {s} ptr0 w mem) 13073 for { 13074 i := v.AuxInt 13075 s := v.Aux 13076 _ = v.Args[2] 13077 ptr0 := v.Args[0] 13078 v_1 := v.Args[1] 13079 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 16 { 13080 break 13081 } 13082 v_1_0 := v_1.Args[0] 13083 if v_1_0.Op != OpARM64MOVDreg { 13084 break 13085 } 13086 w := v_1_0.Args[0] 13087 x := v.Args[2] 13088 if x.Op != OpARM64MOVHstore || x.AuxInt != i-2 || x.Aux != s { 13089 break 13090 } 13091 mem := x.Args[2] 13092 ptr1 := x.Args[0] 13093 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 13094 break 13095 } 13096 v.reset(OpARM64MOVWstore) 13097 v.AuxInt = i - 2 13098 v.Aux = s 13099 v.AddArg(ptr0) 13100 v.AddArg(w) 13101 v.AddArg(mem) 13102 return true 13103 } 13104 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx ptr1 idx1 w mem)) 13105 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 13106 // result: (MOVWstoreidx ptr1 idx1 w mem) 13107 for { 13108 if v.AuxInt != 2 { 13109 break 13110 } 13111 s := v.Aux 13112 _ = v.Args[2] 13113 v_0 := v.Args[0] 13114 if v_0.Op != OpARM64ADD { 13115 break 13116 } 13117 idx0 := v_0.Args[1] 13118 ptr0 := v_0.Args[0] 13119 v_1 := v.Args[1] 13120 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 16 { 13121 break 13122 } 13123 v_1_0 := v_1.Args[0] 13124 if v_1_0.Op != OpARM64MOVDreg { 13125 break 13126 } 13127 w := v_1_0.Args[0] 13128 x := v.Args[2] 13129 if x.Op != OpARM64MOVHstoreidx { 13130 break 13131 } 13132 mem := x.Args[3] 13133 ptr1 := x.Args[0] 13134 idx1 := x.Args[1] 13135 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 13136 break 13137 } 13138 v.reset(OpARM64MOVWstoreidx) 13139 v.AddArg(ptr1) 13140 v.AddArg(idx1) 13141 v.AddArg(w) 13142 
v.AddArg(mem) 13143 return true 13144 } 13145 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 13146 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 13147 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 13148 for { 13149 if v.AuxInt != 2 { 13150 break 13151 } 13152 s := v.Aux 13153 _ = v.Args[2] 13154 v_0 := v.Args[0] 13155 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 13156 break 13157 } 13158 idx0 := v_0.Args[1] 13159 ptr0 := v_0.Args[0] 13160 v_1 := v.Args[1] 13161 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 16 { 13162 break 13163 } 13164 v_1_0 := v_1.Args[0] 13165 if v_1_0.Op != OpARM64MOVDreg { 13166 break 13167 } 13168 w := v_1_0.Args[0] 13169 x := v.Args[2] 13170 if x.Op != OpARM64MOVHstoreidx2 { 13171 break 13172 } 13173 mem := x.Args[3] 13174 ptr1 := x.Args[0] 13175 idx1 := x.Args[1] 13176 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 13177 break 13178 } 13179 v.reset(OpARM64MOVWstoreidx) 13180 v.AddArg(ptr1) 13181 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 13182 v0.AuxInt = 1 13183 v0.AddArg(idx1) 13184 v.AddArg(v0) 13185 v.AddArg(w) 13186 v.AddArg(mem) 13187 return true 13188 } 13189 // match: (MOVHstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVHstore [i-2] {s} ptr1 w0:(SRLconst [j-16] w) mem)) 13190 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 13191 // result: (MOVWstore [i-2] {s} ptr0 w0 mem) 13192 for { 13193 i := v.AuxInt 13194 s := v.Aux 13195 _ = v.Args[2] 13196 ptr0 := v.Args[0] 13197 v_1 := v.Args[1] 13198 if v_1.Op != OpARM64SRLconst { 13199 break 13200 } 13201 j := v_1.AuxInt 13202 w := v_1.Args[0] 13203 x := v.Args[2] 13204 if x.Op != OpARM64MOVHstore || x.AuxInt != i-2 || x.Aux != s { 13205 break 13206 } 13207 mem := x.Args[2] 13208 ptr1 := x.Args[0] 13209 w0 := x.Args[1] 13210 if w0.Op != OpARM64SRLconst || 
w0.AuxInt != j-16 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 13211 break 13212 } 13213 v.reset(OpARM64MOVWstore) 13214 v.AuxInt = i - 2 13215 v.Aux = s 13216 v.AddArg(ptr0) 13217 v.AddArg(w0) 13218 v.AddArg(mem) 13219 return true 13220 } 13221 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 13222 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 13223 // result: (MOVWstoreidx ptr1 idx1 w0 mem) 13224 for { 13225 if v.AuxInt != 2 { 13226 break 13227 } 13228 s := v.Aux 13229 _ = v.Args[2] 13230 v_0 := v.Args[0] 13231 if v_0.Op != OpARM64ADD { 13232 break 13233 } 13234 idx0 := v_0.Args[1] 13235 ptr0 := v_0.Args[0] 13236 v_1 := v.Args[1] 13237 if v_1.Op != OpARM64SRLconst { 13238 break 13239 } 13240 j := v_1.AuxInt 13241 w := v_1.Args[0] 13242 x := v.Args[2] 13243 if x.Op != OpARM64MOVHstoreidx { 13244 break 13245 } 13246 mem := x.Args[3] 13247 ptr1 := x.Args[0] 13248 idx1 := x.Args[1] 13249 w0 := x.Args[2] 13250 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 13251 break 13252 } 13253 v.reset(OpARM64MOVWstoreidx) 13254 v.AddArg(ptr1) 13255 v.AddArg(idx1) 13256 v.AddArg(w0) 13257 v.AddArg(mem) 13258 return true 13259 } 13260 return false 13261 } 13262 func rewriteValueARM64_OpARM64MOVHstore_20(v *Value) bool { 13263 b := v.Block 13264 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 13265 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 13266 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem) 13267 for { 13268 if v.AuxInt != 2 { 13269 break 13270 } 13271 s := 
v.Aux 13272 _ = v.Args[2] 13273 v_0 := v.Args[0] 13274 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 13275 break 13276 } 13277 idx0 := v_0.Args[1] 13278 ptr0 := v_0.Args[0] 13279 v_1 := v.Args[1] 13280 if v_1.Op != OpARM64SRLconst { 13281 break 13282 } 13283 j := v_1.AuxInt 13284 w := v_1.Args[0] 13285 x := v.Args[2] 13286 if x.Op != OpARM64MOVHstoreidx2 { 13287 break 13288 } 13289 mem := x.Args[3] 13290 ptr1 := x.Args[0] 13291 idx1 := x.Args[1] 13292 w0 := x.Args[2] 13293 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-16 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 13294 break 13295 } 13296 v.reset(OpARM64MOVWstoreidx) 13297 v.AddArg(ptr1) 13298 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 13299 v0.AuxInt = 1 13300 v0.AddArg(idx1) 13301 v.AddArg(v0) 13302 v.AddArg(w0) 13303 v.AddArg(mem) 13304 return true 13305 } 13306 return false 13307 } 13308 func rewriteValueARM64_OpARM64MOVHstoreidx_0(v *Value) bool { 13309 // match: (MOVHstoreidx ptr (MOVDconst [c]) val mem) 13310 // result: (MOVHstore [c] ptr val mem) 13311 for { 13312 mem := v.Args[3] 13313 ptr := v.Args[0] 13314 v_1 := v.Args[1] 13315 if v_1.Op != OpARM64MOVDconst { 13316 break 13317 } 13318 c := v_1.AuxInt 13319 val := v.Args[2] 13320 v.reset(OpARM64MOVHstore) 13321 v.AuxInt = c 13322 v.AddArg(ptr) 13323 v.AddArg(val) 13324 v.AddArg(mem) 13325 return true 13326 } 13327 // match: (MOVHstoreidx (MOVDconst [c]) idx val mem) 13328 // result: (MOVHstore [c] idx val mem) 13329 for { 13330 mem := v.Args[3] 13331 v_0 := v.Args[0] 13332 if v_0.Op != OpARM64MOVDconst { 13333 break 13334 } 13335 c := v_0.AuxInt 13336 idx := v.Args[1] 13337 val := v.Args[2] 13338 v.reset(OpARM64MOVHstore) 13339 v.AuxInt = c 13340 v.AddArg(idx) 13341 v.AddArg(val) 13342 v.AddArg(mem) 13343 return true 13344 } 13345 // match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem) 13346 // result: (MOVHstoreidx2 ptr idx val mem) 13347 for { 13348 mem := v.Args[3] 
13349 ptr := v.Args[0] 13350 v_1 := v.Args[1] 13351 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 1 { 13352 break 13353 } 13354 idx := v_1.Args[0] 13355 val := v.Args[2] 13356 v.reset(OpARM64MOVHstoreidx2) 13357 v.AddArg(ptr) 13358 v.AddArg(idx) 13359 v.AddArg(val) 13360 v.AddArg(mem) 13361 return true 13362 } 13363 // match: (MOVHstoreidx ptr (ADD idx idx) val mem) 13364 // result: (MOVHstoreidx2 ptr idx val mem) 13365 for { 13366 mem := v.Args[3] 13367 ptr := v.Args[0] 13368 v_1 := v.Args[1] 13369 if v_1.Op != OpARM64ADD { 13370 break 13371 } 13372 idx := v_1.Args[1] 13373 if idx != v_1.Args[0] { 13374 break 13375 } 13376 val := v.Args[2] 13377 v.reset(OpARM64MOVHstoreidx2) 13378 v.AddArg(ptr) 13379 v.AddArg(idx) 13380 v.AddArg(val) 13381 v.AddArg(mem) 13382 return true 13383 } 13384 // match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem) 13385 // result: (MOVHstoreidx2 ptr idx val mem) 13386 for { 13387 mem := v.Args[3] 13388 v_0 := v.Args[0] 13389 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 1 { 13390 break 13391 } 13392 idx := v_0.Args[0] 13393 ptr := v.Args[1] 13394 val := v.Args[2] 13395 v.reset(OpARM64MOVHstoreidx2) 13396 v.AddArg(ptr) 13397 v.AddArg(idx) 13398 v.AddArg(val) 13399 v.AddArg(mem) 13400 return true 13401 } 13402 // match: (MOVHstoreidx (ADD idx idx) ptr val mem) 13403 // result: (MOVHstoreidx2 ptr idx val mem) 13404 for { 13405 mem := v.Args[3] 13406 v_0 := v.Args[0] 13407 if v_0.Op != OpARM64ADD { 13408 break 13409 } 13410 idx := v_0.Args[1] 13411 if idx != v_0.Args[0] { 13412 break 13413 } 13414 ptr := v.Args[1] 13415 val := v.Args[2] 13416 v.reset(OpARM64MOVHstoreidx2) 13417 v.AddArg(ptr) 13418 v.AddArg(idx) 13419 v.AddArg(val) 13420 v.AddArg(mem) 13421 return true 13422 } 13423 // match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem) 13424 // result: (MOVHstorezeroidx ptr idx mem) 13425 for { 13426 mem := v.Args[3] 13427 ptr := v.Args[0] 13428 idx := v.Args[1] 13429 v_2 := v.Args[2] 13430 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 
13431 break 13432 } 13433 v.reset(OpARM64MOVHstorezeroidx) 13434 v.AddArg(ptr) 13435 v.AddArg(idx) 13436 v.AddArg(mem) 13437 return true 13438 } 13439 // match: (MOVHstoreidx ptr idx (MOVHreg x) mem) 13440 // result: (MOVHstoreidx ptr idx x mem) 13441 for { 13442 mem := v.Args[3] 13443 ptr := v.Args[0] 13444 idx := v.Args[1] 13445 v_2 := v.Args[2] 13446 if v_2.Op != OpARM64MOVHreg { 13447 break 13448 } 13449 x := v_2.Args[0] 13450 v.reset(OpARM64MOVHstoreidx) 13451 v.AddArg(ptr) 13452 v.AddArg(idx) 13453 v.AddArg(x) 13454 v.AddArg(mem) 13455 return true 13456 } 13457 // match: (MOVHstoreidx ptr idx (MOVHUreg x) mem) 13458 // result: (MOVHstoreidx ptr idx x mem) 13459 for { 13460 mem := v.Args[3] 13461 ptr := v.Args[0] 13462 idx := v.Args[1] 13463 v_2 := v.Args[2] 13464 if v_2.Op != OpARM64MOVHUreg { 13465 break 13466 } 13467 x := v_2.Args[0] 13468 v.reset(OpARM64MOVHstoreidx) 13469 v.AddArg(ptr) 13470 v.AddArg(idx) 13471 v.AddArg(x) 13472 v.AddArg(mem) 13473 return true 13474 } 13475 // match: (MOVHstoreidx ptr idx (MOVWreg x) mem) 13476 // result: (MOVHstoreidx ptr idx x mem) 13477 for { 13478 mem := v.Args[3] 13479 ptr := v.Args[0] 13480 idx := v.Args[1] 13481 v_2 := v.Args[2] 13482 if v_2.Op != OpARM64MOVWreg { 13483 break 13484 } 13485 x := v_2.Args[0] 13486 v.reset(OpARM64MOVHstoreidx) 13487 v.AddArg(ptr) 13488 v.AddArg(idx) 13489 v.AddArg(x) 13490 v.AddArg(mem) 13491 return true 13492 } 13493 return false 13494 } 13495 func rewriteValueARM64_OpARM64MOVHstoreidx_10(v *Value) bool { 13496 // match: (MOVHstoreidx ptr idx (MOVWUreg x) mem) 13497 // result: (MOVHstoreidx ptr idx x mem) 13498 for { 13499 mem := v.Args[3] 13500 ptr := v.Args[0] 13501 idx := v.Args[1] 13502 v_2 := v.Args[2] 13503 if v_2.Op != OpARM64MOVWUreg { 13504 break 13505 } 13506 x := v_2.Args[0] 13507 v.reset(OpARM64MOVHstoreidx) 13508 v.AddArg(ptr) 13509 v.AddArg(idx) 13510 v.AddArg(x) 13511 v.AddArg(mem) 13512 return true 13513 } 13514 // match: (MOVHstoreidx ptr (ADDconst [2] idx) (SRLconst 
[16] w) x:(MOVHstoreidx ptr idx w mem)) 13515 // cond: x.Uses == 1 && clobber(x) 13516 // result: (MOVWstoreidx ptr idx w mem) 13517 for { 13518 _ = v.Args[3] 13519 ptr := v.Args[0] 13520 v_1 := v.Args[1] 13521 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 2 { 13522 break 13523 } 13524 idx := v_1.Args[0] 13525 v_2 := v.Args[2] 13526 if v_2.Op != OpARM64SRLconst || v_2.AuxInt != 16 { 13527 break 13528 } 13529 w := v_2.Args[0] 13530 x := v.Args[3] 13531 if x.Op != OpARM64MOVHstoreidx { 13532 break 13533 } 13534 mem := x.Args[3] 13535 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) { 13536 break 13537 } 13538 v.reset(OpARM64MOVWstoreidx) 13539 v.AddArg(ptr) 13540 v.AddArg(idx) 13541 v.AddArg(w) 13542 v.AddArg(mem) 13543 return true 13544 } 13545 return false 13546 } 13547 func rewriteValueARM64_OpARM64MOVHstoreidx2_0(v *Value) bool { 13548 // match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem) 13549 // result: (MOVHstore [c<<1] ptr val mem) 13550 for { 13551 mem := v.Args[3] 13552 ptr := v.Args[0] 13553 v_1 := v.Args[1] 13554 if v_1.Op != OpARM64MOVDconst { 13555 break 13556 } 13557 c := v_1.AuxInt 13558 val := v.Args[2] 13559 v.reset(OpARM64MOVHstore) 13560 v.AuxInt = c << 1 13561 v.AddArg(ptr) 13562 v.AddArg(val) 13563 v.AddArg(mem) 13564 return true 13565 } 13566 // match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem) 13567 // result: (MOVHstorezeroidx2 ptr idx mem) 13568 for { 13569 mem := v.Args[3] 13570 ptr := v.Args[0] 13571 idx := v.Args[1] 13572 v_2 := v.Args[2] 13573 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 13574 break 13575 } 13576 v.reset(OpARM64MOVHstorezeroidx2) 13577 v.AddArg(ptr) 13578 v.AddArg(idx) 13579 v.AddArg(mem) 13580 return true 13581 } 13582 // match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem) 13583 // result: (MOVHstoreidx2 ptr idx x mem) 13584 for { 13585 mem := v.Args[3] 13586 ptr := v.Args[0] 13587 idx := v.Args[1] 13588 v_2 := v.Args[2] 13589 if v_2.Op != OpARM64MOVHreg { 13590 break 13591 } 
13592 x := v_2.Args[0] 13593 v.reset(OpARM64MOVHstoreidx2) 13594 v.AddArg(ptr) 13595 v.AddArg(idx) 13596 v.AddArg(x) 13597 v.AddArg(mem) 13598 return true 13599 } 13600 // match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem) 13601 // result: (MOVHstoreidx2 ptr idx x mem) 13602 for { 13603 mem := v.Args[3] 13604 ptr := v.Args[0] 13605 idx := v.Args[1] 13606 v_2 := v.Args[2] 13607 if v_2.Op != OpARM64MOVHUreg { 13608 break 13609 } 13610 x := v_2.Args[0] 13611 v.reset(OpARM64MOVHstoreidx2) 13612 v.AddArg(ptr) 13613 v.AddArg(idx) 13614 v.AddArg(x) 13615 v.AddArg(mem) 13616 return true 13617 } 13618 // match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem) 13619 // result: (MOVHstoreidx2 ptr idx x mem) 13620 for { 13621 mem := v.Args[3] 13622 ptr := v.Args[0] 13623 idx := v.Args[1] 13624 v_2 := v.Args[2] 13625 if v_2.Op != OpARM64MOVWreg { 13626 break 13627 } 13628 x := v_2.Args[0] 13629 v.reset(OpARM64MOVHstoreidx2) 13630 v.AddArg(ptr) 13631 v.AddArg(idx) 13632 v.AddArg(x) 13633 v.AddArg(mem) 13634 return true 13635 } 13636 // match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem) 13637 // result: (MOVHstoreidx2 ptr idx x mem) 13638 for { 13639 mem := v.Args[3] 13640 ptr := v.Args[0] 13641 idx := v.Args[1] 13642 v_2 := v.Args[2] 13643 if v_2.Op != OpARM64MOVWUreg { 13644 break 13645 } 13646 x := v_2.Args[0] 13647 v.reset(OpARM64MOVHstoreidx2) 13648 v.AddArg(ptr) 13649 v.AddArg(idx) 13650 v.AddArg(x) 13651 v.AddArg(mem) 13652 return true 13653 } 13654 return false 13655 } 13656 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 13657 b := v.Block 13658 config := b.Func.Config 13659 // match: (MOVHstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 13660 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13661 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 13662 for { 13663 off1 := v.AuxInt 13664 sym := v.Aux 13665 mem := v.Args[1] 13666 v_0 := v.Args[0] 13667 if v_0.Op != OpARM64ADDconst { 13668 break 13669 } 13670 off2 := v_0.AuxInt 13671 ptr := 
v_0.Args[0] 13672 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13673 break 13674 } 13675 v.reset(OpARM64MOVHstorezero) 13676 v.AuxInt = off1 + off2 13677 v.Aux = sym 13678 v.AddArg(ptr) 13679 v.AddArg(mem) 13680 return true 13681 } 13682 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 13683 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13684 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 13685 for { 13686 off1 := v.AuxInt 13687 sym1 := v.Aux 13688 mem := v.Args[1] 13689 v_0 := v.Args[0] 13690 if v_0.Op != OpARM64MOVDaddr { 13691 break 13692 } 13693 off2 := v_0.AuxInt 13694 sym2 := v_0.Aux 13695 ptr := v_0.Args[0] 13696 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13697 break 13698 } 13699 v.reset(OpARM64MOVHstorezero) 13700 v.AuxInt = off1 + off2 13701 v.Aux = mergeSym(sym1, sym2) 13702 v.AddArg(ptr) 13703 v.AddArg(mem) 13704 return true 13705 } 13706 // match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem) 13707 // cond: off == 0 && sym == nil 13708 // result: (MOVHstorezeroidx ptr idx mem) 13709 for { 13710 off := v.AuxInt 13711 sym := v.Aux 13712 mem := v.Args[1] 13713 v_0 := v.Args[0] 13714 if v_0.Op != OpARM64ADD { 13715 break 13716 } 13717 idx := v_0.Args[1] 13718 ptr := v_0.Args[0] 13719 if !(off == 0 && sym == nil) { 13720 break 13721 } 13722 v.reset(OpARM64MOVHstorezeroidx) 13723 v.AddArg(ptr) 13724 v.AddArg(idx) 13725 v.AddArg(mem) 13726 return true 13727 } 13728 // match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem) 13729 // cond: off == 0 && sym == nil 13730 // result: (MOVHstorezeroidx2 ptr idx mem) 13731 for { 13732 off := v.AuxInt 13733 sym := v.Aux 13734 mem := v.Args[1] 13735 v_0 := v.Args[0] 13736 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 13737 break 13738 } 13739 idx := v_0.Args[1] 13740 ptr := v_0.Args[0] 13741 if !(off == 0 && sym == 
nil) { 13742 break 13743 } 13744 v.reset(OpARM64MOVHstorezeroidx2) 13745 v.AddArg(ptr) 13746 v.AddArg(idx) 13747 v.AddArg(mem) 13748 return true 13749 } 13750 // match: (MOVHstorezero [i] {s} ptr0 x:(MOVHstorezero [j] {s} ptr1 mem)) 13751 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,2) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 13752 // result: (MOVWstorezero [min(i,j)] {s} ptr0 mem) 13753 for { 13754 i := v.AuxInt 13755 s := v.Aux 13756 _ = v.Args[1] 13757 ptr0 := v.Args[0] 13758 x := v.Args[1] 13759 if x.Op != OpARM64MOVHstorezero { 13760 break 13761 } 13762 j := x.AuxInt 13763 if x.Aux != s { 13764 break 13765 } 13766 mem := x.Args[1] 13767 ptr1 := x.Args[0] 13768 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 2) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 13769 break 13770 } 13771 v.reset(OpARM64MOVWstorezero) 13772 v.AuxInt = min(i, j) 13773 v.Aux = s 13774 v.AddArg(ptr0) 13775 v.AddArg(mem) 13776 return true 13777 } 13778 // match: (MOVHstorezero [2] {s} (ADD ptr0 idx0) x:(MOVHstorezeroidx ptr1 idx1 mem)) 13779 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 13780 // result: (MOVWstorezeroidx ptr1 idx1 mem) 13781 for { 13782 if v.AuxInt != 2 { 13783 break 13784 } 13785 s := v.Aux 13786 _ = v.Args[1] 13787 v_0 := v.Args[0] 13788 if v_0.Op != OpARM64ADD { 13789 break 13790 } 13791 idx0 := v_0.Args[1] 13792 ptr0 := v_0.Args[0] 13793 x := v.Args[1] 13794 if x.Op != OpARM64MOVHstorezeroidx { 13795 break 13796 } 13797 mem := x.Args[2] 13798 ptr1 := x.Args[0] 13799 idx1 := x.Args[1] 13800 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 13801 break 13802 } 13803 v.reset(OpARM64MOVWstorezeroidx) 13804 v.AddArg(ptr1) 13805 v.AddArg(idx1) 13806 v.AddArg(mem) 13807 return true 13808 } 13809 // match: (MOVHstorezero [2] {s} (ADDshiftLL 
[1] ptr0 idx0) x:(MOVHstorezeroidx2 ptr1 idx1 mem)) 13810 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 13811 // result: (MOVWstorezeroidx ptr1 (SLLconst <idx1.Type> [1] idx1) mem) 13812 for { 13813 if v.AuxInt != 2 { 13814 break 13815 } 13816 s := v.Aux 13817 _ = v.Args[1] 13818 v_0 := v.Args[0] 13819 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 1 { 13820 break 13821 } 13822 idx0 := v_0.Args[1] 13823 ptr0 := v_0.Args[0] 13824 x := v.Args[1] 13825 if x.Op != OpARM64MOVHstorezeroidx2 { 13826 break 13827 } 13828 mem := x.Args[2] 13829 ptr1 := x.Args[0] 13830 idx1 := x.Args[1] 13831 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 13832 break 13833 } 13834 v.reset(OpARM64MOVWstorezeroidx) 13835 v.AddArg(ptr1) 13836 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 13837 v0.AuxInt = 1 13838 v0.AddArg(idx1) 13839 v.AddArg(v0) 13840 v.AddArg(mem) 13841 return true 13842 } 13843 return false 13844 } 13845 func rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v *Value) bool { 13846 // match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem) 13847 // result: (MOVHstorezero [c] ptr mem) 13848 for { 13849 mem := v.Args[2] 13850 ptr := v.Args[0] 13851 v_1 := v.Args[1] 13852 if v_1.Op != OpARM64MOVDconst { 13853 break 13854 } 13855 c := v_1.AuxInt 13856 v.reset(OpARM64MOVHstorezero) 13857 v.AuxInt = c 13858 v.AddArg(ptr) 13859 v.AddArg(mem) 13860 return true 13861 } 13862 // match: (MOVHstorezeroidx (MOVDconst [c]) idx mem) 13863 // result: (MOVHstorezero [c] idx mem) 13864 for { 13865 mem := v.Args[2] 13866 v_0 := v.Args[0] 13867 if v_0.Op != OpARM64MOVDconst { 13868 break 13869 } 13870 c := v_0.AuxInt 13871 idx := v.Args[1] 13872 v.reset(OpARM64MOVHstorezero) 13873 v.AuxInt = c 13874 v.AddArg(idx) 13875 v.AddArg(mem) 13876 return true 13877 } 13878 // match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem) 13879 // result: (MOVHstorezeroidx2 ptr idx mem) 13880 for { 13881 mem := v.Args[2] 
13882 ptr := v.Args[0] 13883 v_1 := v.Args[1] 13884 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 1 { 13885 break 13886 } 13887 idx := v_1.Args[0] 13888 v.reset(OpARM64MOVHstorezeroidx2) 13889 v.AddArg(ptr) 13890 v.AddArg(idx) 13891 v.AddArg(mem) 13892 return true 13893 } 13894 // match: (MOVHstorezeroidx ptr (ADD idx idx) mem) 13895 // result: (MOVHstorezeroidx2 ptr idx mem) 13896 for { 13897 mem := v.Args[2] 13898 ptr := v.Args[0] 13899 v_1 := v.Args[1] 13900 if v_1.Op != OpARM64ADD { 13901 break 13902 } 13903 idx := v_1.Args[1] 13904 if idx != v_1.Args[0] { 13905 break 13906 } 13907 v.reset(OpARM64MOVHstorezeroidx2) 13908 v.AddArg(ptr) 13909 v.AddArg(idx) 13910 v.AddArg(mem) 13911 return true 13912 } 13913 // match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem) 13914 // result: (MOVHstorezeroidx2 ptr idx mem) 13915 for { 13916 mem := v.Args[2] 13917 v_0 := v.Args[0] 13918 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 1 { 13919 break 13920 } 13921 idx := v_0.Args[0] 13922 ptr := v.Args[1] 13923 v.reset(OpARM64MOVHstorezeroidx2) 13924 v.AddArg(ptr) 13925 v.AddArg(idx) 13926 v.AddArg(mem) 13927 return true 13928 } 13929 // match: (MOVHstorezeroidx (ADD idx idx) ptr mem) 13930 // result: (MOVHstorezeroidx2 ptr idx mem) 13931 for { 13932 mem := v.Args[2] 13933 v_0 := v.Args[0] 13934 if v_0.Op != OpARM64ADD { 13935 break 13936 } 13937 idx := v_0.Args[1] 13938 if idx != v_0.Args[0] { 13939 break 13940 } 13941 ptr := v.Args[1] 13942 v.reset(OpARM64MOVHstorezeroidx2) 13943 v.AddArg(ptr) 13944 v.AddArg(idx) 13945 v.AddArg(mem) 13946 return true 13947 } 13948 // match: (MOVHstorezeroidx ptr (ADDconst [2] idx) x:(MOVHstorezeroidx ptr idx mem)) 13949 // cond: x.Uses == 1 && clobber(x) 13950 // result: (MOVWstorezeroidx ptr idx mem) 13951 for { 13952 _ = v.Args[2] 13953 ptr := v.Args[0] 13954 v_1 := v.Args[1] 13955 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 2 { 13956 break 13957 } 13958 idx := v_1.Args[0] 13959 x := v.Args[2] 13960 if x.Op != OpARM64MOVHstorezeroidx { 
13961 break 13962 } 13963 mem := x.Args[2] 13964 if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) { 13965 break 13966 } 13967 v.reset(OpARM64MOVWstorezeroidx) 13968 v.AddArg(ptr) 13969 v.AddArg(idx) 13970 v.AddArg(mem) 13971 return true 13972 } 13973 return false 13974 } 13975 func rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v *Value) bool { 13976 // match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem) 13977 // result: (MOVHstorezero [c<<1] ptr mem) 13978 for { 13979 mem := v.Args[2] 13980 ptr := v.Args[0] 13981 v_1 := v.Args[1] 13982 if v_1.Op != OpARM64MOVDconst { 13983 break 13984 } 13985 c := v_1.AuxInt 13986 v.reset(OpARM64MOVHstorezero) 13987 v.AuxInt = c << 1 13988 v.AddArg(ptr) 13989 v.AddArg(mem) 13990 return true 13991 } 13992 return false 13993 } 13994 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool { 13995 b := v.Block 13996 config := b.Func.Config 13997 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 13998 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13999 // result: (MOVQstorezero [off1+off2] {sym} ptr mem) 14000 for { 14001 off1 := v.AuxInt 14002 sym := v.Aux 14003 mem := v.Args[1] 14004 v_0 := v.Args[0] 14005 if v_0.Op != OpARM64ADDconst { 14006 break 14007 } 14008 off2 := v_0.AuxInt 14009 ptr := v_0.Args[0] 14010 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14011 break 14012 } 14013 v.reset(OpARM64MOVQstorezero) 14014 v.AuxInt = off1 + off2 14015 v.Aux = sym 14016 v.AddArg(ptr) 14017 v.AddArg(mem) 14018 return true 14019 } 14020 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 14021 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14022 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 14023 for { 14024 off1 := v.AuxInt 14025 sym1 := v.Aux 14026 mem := v.Args[1] 14027 v_0 := v.Args[0] 14028 if v_0.Op != OpARM64MOVDaddr { 14029 break 14030 } 
14031 off2 := v_0.AuxInt 14032 sym2 := v_0.Aux 14033 ptr := v_0.Args[0] 14034 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14035 break 14036 } 14037 v.reset(OpARM64MOVQstorezero) 14038 v.AuxInt = off1 + off2 14039 v.Aux = mergeSym(sym1, sym2) 14040 v.AddArg(ptr) 14041 v.AddArg(mem) 14042 return true 14043 } 14044 return false 14045 } 14046 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 14047 b := v.Block 14048 config := b.Func.Config 14049 // match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _)) 14050 // result: (FMOVSfpgp val) 14051 for { 14052 off := v.AuxInt 14053 sym := v.Aux 14054 _ = v.Args[1] 14055 ptr := v.Args[0] 14056 v_1 := v.Args[1] 14057 if v_1.Op != OpARM64FMOVSstore || v_1.AuxInt != off || v_1.Aux != sym { 14058 break 14059 } 14060 _ = v_1.Args[2] 14061 if ptr != v_1.Args[0] { 14062 break 14063 } 14064 val := v_1.Args[1] 14065 v.reset(OpARM64FMOVSfpgp) 14066 v.AddArg(val) 14067 return true 14068 } 14069 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 14070 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14071 // result: (MOVWUload [off1+off2] {sym} ptr mem) 14072 for { 14073 off1 := v.AuxInt 14074 sym := v.Aux 14075 mem := v.Args[1] 14076 v_0 := v.Args[0] 14077 if v_0.Op != OpARM64ADDconst { 14078 break 14079 } 14080 off2 := v_0.AuxInt 14081 ptr := v_0.Args[0] 14082 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14083 break 14084 } 14085 v.reset(OpARM64MOVWUload) 14086 v.AuxInt = off1 + off2 14087 v.Aux = sym 14088 v.AddArg(ptr) 14089 v.AddArg(mem) 14090 return true 14091 } 14092 // match: (MOVWUload [off] {sym} (ADD ptr idx) mem) 14093 // cond: off == 0 && sym == nil 14094 // result: (MOVWUloadidx ptr idx mem) 14095 for { 14096 off := v.AuxInt 14097 sym := v.Aux 14098 mem := v.Args[1] 14099 v_0 := v.Args[0] 14100 if v_0.Op != OpARM64ADD { 14101 break 14102 } 14103 idx := v_0.Args[1] 14104 ptr := 
v_0.Args[0] 14105 if !(off == 0 && sym == nil) { 14106 break 14107 } 14108 v.reset(OpARM64MOVWUloadidx) 14109 v.AddArg(ptr) 14110 v.AddArg(idx) 14111 v.AddArg(mem) 14112 return true 14113 } 14114 // match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 14115 // cond: off == 0 && sym == nil 14116 // result: (MOVWUloadidx4 ptr idx mem) 14117 for { 14118 off := v.AuxInt 14119 sym := v.Aux 14120 mem := v.Args[1] 14121 v_0 := v.Args[0] 14122 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 14123 break 14124 } 14125 idx := v_0.Args[1] 14126 ptr := v_0.Args[0] 14127 if !(off == 0 && sym == nil) { 14128 break 14129 } 14130 v.reset(OpARM64MOVWUloadidx4) 14131 v.AddArg(ptr) 14132 v.AddArg(idx) 14133 v.AddArg(mem) 14134 return true 14135 } 14136 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 14137 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14138 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 14139 for { 14140 off1 := v.AuxInt 14141 sym1 := v.Aux 14142 mem := v.Args[1] 14143 v_0 := v.Args[0] 14144 if v_0.Op != OpARM64MOVDaddr { 14145 break 14146 } 14147 off2 := v_0.AuxInt 14148 sym2 := v_0.Aux 14149 ptr := v_0.Args[0] 14150 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14151 break 14152 } 14153 v.reset(OpARM64MOVWUload) 14154 v.AuxInt = off1 + off2 14155 v.Aux = mergeSym(sym1, sym2) 14156 v.AddArg(ptr) 14157 v.AddArg(mem) 14158 return true 14159 } 14160 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 14161 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 14162 // result: (MOVDconst [0]) 14163 for { 14164 off := v.AuxInt 14165 sym := v.Aux 14166 _ = v.Args[1] 14167 ptr := v.Args[0] 14168 v_1 := v.Args[1] 14169 if v_1.Op != OpARM64MOVWstorezero { 14170 break 14171 } 14172 off2 := v_1.AuxInt 14173 sym2 := v_1.Aux 14174 _ = v_1.Args[1] 14175 ptr2 := v_1.Args[0] 14176 if !(sym == 
sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 14177 break 14178 } 14179 v.reset(OpARM64MOVDconst) 14180 v.AuxInt = 0 14181 return true 14182 } 14183 // match: (MOVWUload [off] {sym} (SB) _) 14184 // cond: symIsRO(sym) 14185 // result: (MOVDconst [int64(read32(sym, off, config.BigEndian))]) 14186 for { 14187 off := v.AuxInt 14188 sym := v.Aux 14189 _ = v.Args[1] 14190 v_0 := v.Args[0] 14191 if v_0.Op != OpSB || !(symIsRO(sym)) { 14192 break 14193 } 14194 v.reset(OpARM64MOVDconst) 14195 v.AuxInt = int64(read32(sym, off, config.BigEndian)) 14196 return true 14197 } 14198 return false 14199 } 14200 func rewriteValueARM64_OpARM64MOVWUloadidx_0(v *Value) bool { 14201 // match: (MOVWUloadidx ptr (MOVDconst [c]) mem) 14202 // result: (MOVWUload [c] ptr mem) 14203 for { 14204 mem := v.Args[2] 14205 ptr := v.Args[0] 14206 v_1 := v.Args[1] 14207 if v_1.Op != OpARM64MOVDconst { 14208 break 14209 } 14210 c := v_1.AuxInt 14211 v.reset(OpARM64MOVWUload) 14212 v.AuxInt = c 14213 v.AddArg(ptr) 14214 v.AddArg(mem) 14215 return true 14216 } 14217 // match: (MOVWUloadidx (MOVDconst [c]) ptr mem) 14218 // result: (MOVWUload [c] ptr mem) 14219 for { 14220 mem := v.Args[2] 14221 v_0 := v.Args[0] 14222 if v_0.Op != OpARM64MOVDconst { 14223 break 14224 } 14225 c := v_0.AuxInt 14226 ptr := v.Args[1] 14227 v.reset(OpARM64MOVWUload) 14228 v.AuxInt = c 14229 v.AddArg(ptr) 14230 v.AddArg(mem) 14231 return true 14232 } 14233 // match: (MOVWUloadidx ptr (SLLconst [2] idx) mem) 14234 // result: (MOVWUloadidx4 ptr idx mem) 14235 for { 14236 mem := v.Args[2] 14237 ptr := v.Args[0] 14238 v_1 := v.Args[1] 14239 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 2 { 14240 break 14241 } 14242 idx := v_1.Args[0] 14243 v.reset(OpARM64MOVWUloadidx4) 14244 v.AddArg(ptr) 14245 v.AddArg(idx) 14246 v.AddArg(mem) 14247 return true 14248 } 14249 // match: (MOVWUloadidx (SLLconst [2] idx) ptr mem) 14250 // result: (MOVWUloadidx4 ptr idx mem) 14251 for { 14252 mem := v.Args[2] 14253 v_0 := v.Args[0] 14254 if v_0.Op != 
OpARM64SLLconst || v_0.AuxInt != 2 { 14255 break 14256 } 14257 idx := v_0.Args[0] 14258 ptr := v.Args[1] 14259 v.reset(OpARM64MOVWUloadidx4) 14260 v.AddArg(ptr) 14261 v.AddArg(idx) 14262 v.AddArg(mem) 14263 return true 14264 } 14265 // match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 14266 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 14267 // result: (MOVDconst [0]) 14268 for { 14269 _ = v.Args[2] 14270 ptr := v.Args[0] 14271 idx := v.Args[1] 14272 v_2 := v.Args[2] 14273 if v_2.Op != OpARM64MOVWstorezeroidx { 14274 break 14275 } 14276 _ = v_2.Args[2] 14277 ptr2 := v_2.Args[0] 14278 idx2 := v_2.Args[1] 14279 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 14280 break 14281 } 14282 v.reset(OpARM64MOVDconst) 14283 v.AuxInt = 0 14284 return true 14285 } 14286 return false 14287 } 14288 func rewriteValueARM64_OpARM64MOVWUloadidx4_0(v *Value) bool { 14289 // match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem) 14290 // result: (MOVWUload [c<<2] ptr mem) 14291 for { 14292 mem := v.Args[2] 14293 ptr := v.Args[0] 14294 v_1 := v.Args[1] 14295 if v_1.Op != OpARM64MOVDconst { 14296 break 14297 } 14298 c := v_1.AuxInt 14299 v.reset(OpARM64MOVWUload) 14300 v.AuxInt = c << 2 14301 v.AddArg(ptr) 14302 v.AddArg(mem) 14303 return true 14304 } 14305 // match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 14306 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 14307 // result: (MOVDconst [0]) 14308 for { 14309 _ = v.Args[2] 14310 ptr := v.Args[0] 14311 idx := v.Args[1] 14312 v_2 := v.Args[2] 14313 if v_2.Op != OpARM64MOVWstorezeroidx4 { 14314 break 14315 } 14316 _ = v_2.Args[2] 14317 ptr2 := v_2.Args[0] 14318 idx2 := v_2.Args[1] 14319 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 14320 break 14321 } 14322 v.reset(OpARM64MOVDconst) 14323 v.AuxInt = 0 14324 return true 14325 } 14326 return false 14327 } 14328 func 
rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 14329 // match: (MOVWUreg x:(MOVBUload _ _)) 14330 // result: (MOVDreg x) 14331 for { 14332 x := v.Args[0] 14333 if x.Op != OpARM64MOVBUload { 14334 break 14335 } 14336 _ = x.Args[1] 14337 v.reset(OpARM64MOVDreg) 14338 v.AddArg(x) 14339 return true 14340 } 14341 // match: (MOVWUreg x:(MOVHUload _ _)) 14342 // result: (MOVDreg x) 14343 for { 14344 x := v.Args[0] 14345 if x.Op != OpARM64MOVHUload { 14346 break 14347 } 14348 _ = x.Args[1] 14349 v.reset(OpARM64MOVDreg) 14350 v.AddArg(x) 14351 return true 14352 } 14353 // match: (MOVWUreg x:(MOVWUload _ _)) 14354 // result: (MOVDreg x) 14355 for { 14356 x := v.Args[0] 14357 if x.Op != OpARM64MOVWUload { 14358 break 14359 } 14360 _ = x.Args[1] 14361 v.reset(OpARM64MOVDreg) 14362 v.AddArg(x) 14363 return true 14364 } 14365 // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) 14366 // result: (MOVDreg x) 14367 for { 14368 x := v.Args[0] 14369 if x.Op != OpARM64MOVBUloadidx { 14370 break 14371 } 14372 _ = x.Args[2] 14373 v.reset(OpARM64MOVDreg) 14374 v.AddArg(x) 14375 return true 14376 } 14377 // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) 14378 // result: (MOVDreg x) 14379 for { 14380 x := v.Args[0] 14381 if x.Op != OpARM64MOVHUloadidx { 14382 break 14383 } 14384 _ = x.Args[2] 14385 v.reset(OpARM64MOVDreg) 14386 v.AddArg(x) 14387 return true 14388 } 14389 // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) 14390 // result: (MOVDreg x) 14391 for { 14392 x := v.Args[0] 14393 if x.Op != OpARM64MOVWUloadidx { 14394 break 14395 } 14396 _ = x.Args[2] 14397 v.reset(OpARM64MOVDreg) 14398 v.AddArg(x) 14399 return true 14400 } 14401 // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) 14402 // result: (MOVDreg x) 14403 for { 14404 x := v.Args[0] 14405 if x.Op != OpARM64MOVHUloadidx2 { 14406 break 14407 } 14408 _ = x.Args[2] 14409 v.reset(OpARM64MOVDreg) 14410 v.AddArg(x) 14411 return true 14412 } 14413 // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) 14414 // result: (MOVDreg x) 14415 for { 14416 x := v.Args[0] 
14417 if x.Op != OpARM64MOVWUloadidx4 { 14418 break 14419 } 14420 _ = x.Args[2] 14421 v.reset(OpARM64MOVDreg) 14422 v.AddArg(x) 14423 return true 14424 } 14425 // match: (MOVWUreg x:(MOVBUreg _)) 14426 // result: (MOVDreg x) 14427 for { 14428 x := v.Args[0] 14429 if x.Op != OpARM64MOVBUreg { 14430 break 14431 } 14432 v.reset(OpARM64MOVDreg) 14433 v.AddArg(x) 14434 return true 14435 } 14436 // match: (MOVWUreg x:(MOVHUreg _)) 14437 // result: (MOVDreg x) 14438 for { 14439 x := v.Args[0] 14440 if x.Op != OpARM64MOVHUreg { 14441 break 14442 } 14443 v.reset(OpARM64MOVDreg) 14444 v.AddArg(x) 14445 return true 14446 } 14447 return false 14448 } 14449 func rewriteValueARM64_OpARM64MOVWUreg_10(v *Value) bool { 14450 // match: (MOVWUreg x:(MOVWUreg _)) 14451 // result: (MOVDreg x) 14452 for { 14453 x := v.Args[0] 14454 if x.Op != OpARM64MOVWUreg { 14455 break 14456 } 14457 v.reset(OpARM64MOVDreg) 14458 v.AddArg(x) 14459 return true 14460 } 14461 // match: (MOVWUreg (ANDconst [c] x)) 14462 // result: (ANDconst [c&(1<<32-1)] x) 14463 for { 14464 v_0 := v.Args[0] 14465 if v_0.Op != OpARM64ANDconst { 14466 break 14467 } 14468 c := v_0.AuxInt 14469 x := v_0.Args[0] 14470 v.reset(OpARM64ANDconst) 14471 v.AuxInt = c & (1<<32 - 1) 14472 v.AddArg(x) 14473 return true 14474 } 14475 // match: (MOVWUreg (MOVDconst [c])) 14476 // result: (MOVDconst [int64(uint32(c))]) 14477 for { 14478 v_0 := v.Args[0] 14479 if v_0.Op != OpARM64MOVDconst { 14480 break 14481 } 14482 c := v_0.AuxInt 14483 v.reset(OpARM64MOVDconst) 14484 v.AuxInt = int64(uint32(c)) 14485 return true 14486 } 14487 // match: (MOVWUreg (SLLconst [sc] x)) 14488 // cond: isARM64BFMask(sc, 1<<32-1, sc) 14489 // result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x) 14490 for { 14491 v_0 := v.Args[0] 14492 if v_0.Op != OpARM64SLLconst { 14493 break 14494 } 14495 sc := v_0.AuxInt 14496 x := v_0.Args[0] 14497 if !(isARM64BFMask(sc, 1<<32-1, sc)) { 14498 break 14499 } 14500 v.reset(OpARM64UBFIZ) 14501 v.AuxInt = 
armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc)) 14502 v.AddArg(x) 14503 return true 14504 } 14505 // match: (MOVWUreg (SRLconst [sc] x)) 14506 // cond: isARM64BFMask(sc, 1<<32-1, 0) 14507 // result: (UBFX [armBFAuxInt(sc, 32)] x) 14508 for { 14509 v_0 := v.Args[0] 14510 if v_0.Op != OpARM64SRLconst { 14511 break 14512 } 14513 sc := v_0.AuxInt 14514 x := v_0.Args[0] 14515 if !(isARM64BFMask(sc, 1<<32-1, 0)) { 14516 break 14517 } 14518 v.reset(OpARM64UBFX) 14519 v.AuxInt = armBFAuxInt(sc, 32) 14520 v.AddArg(x) 14521 return true 14522 } 14523 return false 14524 } 14525 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 14526 b := v.Block 14527 config := b.Func.Config 14528 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 14529 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14530 // result: (MOVWload [off1+off2] {sym} ptr mem) 14531 for { 14532 off1 := v.AuxInt 14533 sym := v.Aux 14534 mem := v.Args[1] 14535 v_0 := v.Args[0] 14536 if v_0.Op != OpARM64ADDconst { 14537 break 14538 } 14539 off2 := v_0.AuxInt 14540 ptr := v_0.Args[0] 14541 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14542 break 14543 } 14544 v.reset(OpARM64MOVWload) 14545 v.AuxInt = off1 + off2 14546 v.Aux = sym 14547 v.AddArg(ptr) 14548 v.AddArg(mem) 14549 return true 14550 } 14551 // match: (MOVWload [off] {sym} (ADD ptr idx) mem) 14552 // cond: off == 0 && sym == nil 14553 // result: (MOVWloadidx ptr idx mem) 14554 for { 14555 off := v.AuxInt 14556 sym := v.Aux 14557 mem := v.Args[1] 14558 v_0 := v.Args[0] 14559 if v_0.Op != OpARM64ADD { 14560 break 14561 } 14562 idx := v_0.Args[1] 14563 ptr := v_0.Args[0] 14564 if !(off == 0 && sym == nil) { 14565 break 14566 } 14567 v.reset(OpARM64MOVWloadidx) 14568 v.AddArg(ptr) 14569 v.AddArg(idx) 14570 v.AddArg(mem) 14571 return true 14572 } 14573 // match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 14574 // cond: off == 0 && sym == nil 14575 // result: (MOVWloadidx4 ptr idx mem) 
14576 for { 14577 off := v.AuxInt 14578 sym := v.Aux 14579 mem := v.Args[1] 14580 v_0 := v.Args[0] 14581 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 14582 break 14583 } 14584 idx := v_0.Args[1] 14585 ptr := v_0.Args[0] 14586 if !(off == 0 && sym == nil) { 14587 break 14588 } 14589 v.reset(OpARM64MOVWloadidx4) 14590 v.AddArg(ptr) 14591 v.AddArg(idx) 14592 v.AddArg(mem) 14593 return true 14594 } 14595 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 14596 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14597 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 14598 for { 14599 off1 := v.AuxInt 14600 sym1 := v.Aux 14601 mem := v.Args[1] 14602 v_0 := v.Args[0] 14603 if v_0.Op != OpARM64MOVDaddr { 14604 break 14605 } 14606 off2 := v_0.AuxInt 14607 sym2 := v_0.Aux 14608 ptr := v_0.Args[0] 14609 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14610 break 14611 } 14612 v.reset(OpARM64MOVWload) 14613 v.AuxInt = off1 + off2 14614 v.Aux = mergeSym(sym1, sym2) 14615 v.AddArg(ptr) 14616 v.AddArg(mem) 14617 return true 14618 } 14619 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 14620 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 14621 // result: (MOVDconst [0]) 14622 for { 14623 off := v.AuxInt 14624 sym := v.Aux 14625 _ = v.Args[1] 14626 ptr := v.Args[0] 14627 v_1 := v.Args[1] 14628 if v_1.Op != OpARM64MOVWstorezero { 14629 break 14630 } 14631 off2 := v_1.AuxInt 14632 sym2 := v_1.Aux 14633 _ = v_1.Args[1] 14634 ptr2 := v_1.Args[0] 14635 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 14636 break 14637 } 14638 v.reset(OpARM64MOVDconst) 14639 v.AuxInt = 0 14640 return true 14641 } 14642 return false 14643 } 14644 func rewriteValueARM64_OpARM64MOVWloadidx_0(v *Value) bool { 14645 // match: (MOVWloadidx ptr (MOVDconst [c]) mem) 14646 // result: (MOVWload [c] ptr mem) 14647 for { 14648 
mem := v.Args[2] 14649 ptr := v.Args[0] 14650 v_1 := v.Args[1] 14651 if v_1.Op != OpARM64MOVDconst { 14652 break 14653 } 14654 c := v_1.AuxInt 14655 v.reset(OpARM64MOVWload) 14656 v.AuxInt = c 14657 v.AddArg(ptr) 14658 v.AddArg(mem) 14659 return true 14660 } 14661 // match: (MOVWloadidx (MOVDconst [c]) ptr mem) 14662 // result: (MOVWload [c] ptr mem) 14663 for { 14664 mem := v.Args[2] 14665 v_0 := v.Args[0] 14666 if v_0.Op != OpARM64MOVDconst { 14667 break 14668 } 14669 c := v_0.AuxInt 14670 ptr := v.Args[1] 14671 v.reset(OpARM64MOVWload) 14672 v.AuxInt = c 14673 v.AddArg(ptr) 14674 v.AddArg(mem) 14675 return true 14676 } 14677 // match: (MOVWloadidx ptr (SLLconst [2] idx) mem) 14678 // result: (MOVWloadidx4 ptr idx mem) 14679 for { 14680 mem := v.Args[2] 14681 ptr := v.Args[0] 14682 v_1 := v.Args[1] 14683 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 2 { 14684 break 14685 } 14686 idx := v_1.Args[0] 14687 v.reset(OpARM64MOVWloadidx4) 14688 v.AddArg(ptr) 14689 v.AddArg(idx) 14690 v.AddArg(mem) 14691 return true 14692 } 14693 // match: (MOVWloadidx (SLLconst [2] idx) ptr mem) 14694 // result: (MOVWloadidx4 ptr idx mem) 14695 for { 14696 mem := v.Args[2] 14697 v_0 := v.Args[0] 14698 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 2 { 14699 break 14700 } 14701 idx := v_0.Args[0] 14702 ptr := v.Args[1] 14703 v.reset(OpARM64MOVWloadidx4) 14704 v.AddArg(ptr) 14705 v.AddArg(idx) 14706 v.AddArg(mem) 14707 return true 14708 } 14709 // match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 14710 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 14711 // result: (MOVDconst [0]) 14712 for { 14713 _ = v.Args[2] 14714 ptr := v.Args[0] 14715 idx := v.Args[1] 14716 v_2 := v.Args[2] 14717 if v_2.Op != OpARM64MOVWstorezeroidx { 14718 break 14719 } 14720 _ = v_2.Args[2] 14721 ptr2 := v_2.Args[0] 14722 idx2 := v_2.Args[1] 14723 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 
14724 break 14725 } 14726 v.reset(OpARM64MOVDconst) 14727 v.AuxInt = 0 14728 return true 14729 } 14730 return false 14731 } 14732 func rewriteValueARM64_OpARM64MOVWloadidx4_0(v *Value) bool { 14733 // match: (MOVWloadidx4 ptr (MOVDconst [c]) mem) 14734 // result: (MOVWload [c<<2] ptr mem) 14735 for { 14736 mem := v.Args[2] 14737 ptr := v.Args[0] 14738 v_1 := v.Args[1] 14739 if v_1.Op != OpARM64MOVDconst { 14740 break 14741 } 14742 c := v_1.AuxInt 14743 v.reset(OpARM64MOVWload) 14744 v.AuxInt = c << 2 14745 v.AddArg(ptr) 14746 v.AddArg(mem) 14747 return true 14748 } 14749 // match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 14750 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 14751 // result: (MOVDconst [0]) 14752 for { 14753 _ = v.Args[2] 14754 ptr := v.Args[0] 14755 idx := v.Args[1] 14756 v_2 := v.Args[2] 14757 if v_2.Op != OpARM64MOVWstorezeroidx4 { 14758 break 14759 } 14760 _ = v_2.Args[2] 14761 ptr2 := v_2.Args[0] 14762 idx2 := v_2.Args[1] 14763 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 14764 break 14765 } 14766 v.reset(OpARM64MOVDconst) 14767 v.AuxInt = 0 14768 return true 14769 } 14770 return false 14771 } 14772 func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool { 14773 // match: (MOVWreg x:(MOVBload _ _)) 14774 // result: (MOVDreg x) 14775 for { 14776 x := v.Args[0] 14777 if x.Op != OpARM64MOVBload { 14778 break 14779 } 14780 _ = x.Args[1] 14781 v.reset(OpARM64MOVDreg) 14782 v.AddArg(x) 14783 return true 14784 } 14785 // match: (MOVWreg x:(MOVBUload _ _)) 14786 // result: (MOVDreg x) 14787 for { 14788 x := v.Args[0] 14789 if x.Op != OpARM64MOVBUload { 14790 break 14791 } 14792 _ = x.Args[1] 14793 v.reset(OpARM64MOVDreg) 14794 v.AddArg(x) 14795 return true 14796 } 14797 // match: (MOVWreg x:(MOVHload _ _)) 14798 // result: (MOVDreg x) 14799 for { 14800 x := v.Args[0] 14801 if x.Op != OpARM64MOVHload { 14802 break 14803 } 14804 _ = x.Args[1] 14805 v.reset(OpARM64MOVDreg) 14806 v.AddArg(x) 14807 return true 14808 } 14809 // 
match: (MOVWreg x:(MOVHUload _ _)) 14810 // result: (MOVDreg x) 14811 for { 14812 x := v.Args[0] 14813 if x.Op != OpARM64MOVHUload { 14814 break 14815 } 14816 _ = x.Args[1] 14817 v.reset(OpARM64MOVDreg) 14818 v.AddArg(x) 14819 return true 14820 } 14821 // match: (MOVWreg x:(MOVWload _ _)) 14822 // result: (MOVDreg x) 14823 for { 14824 x := v.Args[0] 14825 if x.Op != OpARM64MOVWload { 14826 break 14827 } 14828 _ = x.Args[1] 14829 v.reset(OpARM64MOVDreg) 14830 v.AddArg(x) 14831 return true 14832 } 14833 // match: (MOVWreg x:(MOVBloadidx _ _ _)) 14834 // result: (MOVDreg x) 14835 for { 14836 x := v.Args[0] 14837 if x.Op != OpARM64MOVBloadidx { 14838 break 14839 } 14840 _ = x.Args[2] 14841 v.reset(OpARM64MOVDreg) 14842 v.AddArg(x) 14843 return true 14844 } 14845 // match: (MOVWreg x:(MOVBUloadidx _ _ _)) 14846 // result: (MOVDreg x) 14847 for { 14848 x := v.Args[0] 14849 if x.Op != OpARM64MOVBUloadidx { 14850 break 14851 } 14852 _ = x.Args[2] 14853 v.reset(OpARM64MOVDreg) 14854 v.AddArg(x) 14855 return true 14856 } 14857 // match: (MOVWreg x:(MOVHloadidx _ _ _)) 14858 // result: (MOVDreg x) 14859 for { 14860 x := v.Args[0] 14861 if x.Op != OpARM64MOVHloadidx { 14862 break 14863 } 14864 _ = x.Args[2] 14865 v.reset(OpARM64MOVDreg) 14866 v.AddArg(x) 14867 return true 14868 } 14869 // match: (MOVWreg x:(MOVHUloadidx _ _ _)) 14870 // result: (MOVDreg x) 14871 for { 14872 x := v.Args[0] 14873 if x.Op != OpARM64MOVHUloadidx { 14874 break 14875 } 14876 _ = x.Args[2] 14877 v.reset(OpARM64MOVDreg) 14878 v.AddArg(x) 14879 return true 14880 } 14881 // match: (MOVWreg x:(MOVWloadidx _ _ _)) 14882 // result: (MOVDreg x) 14883 for { 14884 x := v.Args[0] 14885 if x.Op != OpARM64MOVWloadidx { 14886 break 14887 } 14888 _ = x.Args[2] 14889 v.reset(OpARM64MOVDreg) 14890 v.AddArg(x) 14891 return true 14892 } 14893 return false 14894 } 14895 func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool { 14896 // match: (MOVWreg x:(MOVHloadidx2 _ _ _)) 14897 // result: (MOVDreg x) 14898 for { 
14899 x := v.Args[0] 14900 if x.Op != OpARM64MOVHloadidx2 { 14901 break 14902 } 14903 _ = x.Args[2] 14904 v.reset(OpARM64MOVDreg) 14905 v.AddArg(x) 14906 return true 14907 } 14908 // match: (MOVWreg x:(MOVHUloadidx2 _ _ _)) 14909 // result: (MOVDreg x) 14910 for { 14911 x := v.Args[0] 14912 if x.Op != OpARM64MOVHUloadidx2 { 14913 break 14914 } 14915 _ = x.Args[2] 14916 v.reset(OpARM64MOVDreg) 14917 v.AddArg(x) 14918 return true 14919 } 14920 // match: (MOVWreg x:(MOVWloadidx4 _ _ _)) 14921 // result: (MOVDreg x) 14922 for { 14923 x := v.Args[0] 14924 if x.Op != OpARM64MOVWloadidx4 { 14925 break 14926 } 14927 _ = x.Args[2] 14928 v.reset(OpARM64MOVDreg) 14929 v.AddArg(x) 14930 return true 14931 } 14932 // match: (MOVWreg x:(MOVBreg _)) 14933 // result: (MOVDreg x) 14934 for { 14935 x := v.Args[0] 14936 if x.Op != OpARM64MOVBreg { 14937 break 14938 } 14939 v.reset(OpARM64MOVDreg) 14940 v.AddArg(x) 14941 return true 14942 } 14943 // match: (MOVWreg x:(MOVBUreg _)) 14944 // result: (MOVDreg x) 14945 for { 14946 x := v.Args[0] 14947 if x.Op != OpARM64MOVBUreg { 14948 break 14949 } 14950 v.reset(OpARM64MOVDreg) 14951 v.AddArg(x) 14952 return true 14953 } 14954 // match: (MOVWreg x:(MOVHreg _)) 14955 // result: (MOVDreg x) 14956 for { 14957 x := v.Args[0] 14958 if x.Op != OpARM64MOVHreg { 14959 break 14960 } 14961 v.reset(OpARM64MOVDreg) 14962 v.AddArg(x) 14963 return true 14964 } 14965 // match: (MOVWreg x:(MOVHreg _)) 14966 // result: (MOVDreg x) 14967 for { 14968 x := v.Args[0] 14969 if x.Op != OpARM64MOVHreg { 14970 break 14971 } 14972 v.reset(OpARM64MOVDreg) 14973 v.AddArg(x) 14974 return true 14975 } 14976 // match: (MOVWreg x:(MOVWreg _)) 14977 // result: (MOVDreg x) 14978 for { 14979 x := v.Args[0] 14980 if x.Op != OpARM64MOVWreg { 14981 break 14982 } 14983 v.reset(OpARM64MOVDreg) 14984 v.AddArg(x) 14985 return true 14986 } 14987 // match: (MOVWreg (MOVDconst [c])) 14988 // result: (MOVDconst [int64(int32(c))]) 14989 for { 14990 v_0 := v.Args[0] 14991 if v_0.Op != 
OpARM64MOVDconst { 14992 break 14993 } 14994 c := v_0.AuxInt 14995 v.reset(OpARM64MOVDconst) 14996 v.AuxInt = int64(int32(c)) 14997 return true 14998 } 14999 // match: (MOVWreg (SLLconst [lc] x)) 15000 // cond: lc < 32 15001 // result: (SBFIZ [armBFAuxInt(lc, 32-lc)] x) 15002 for { 15003 v_0 := v.Args[0] 15004 if v_0.Op != OpARM64SLLconst { 15005 break 15006 } 15007 lc := v_0.AuxInt 15008 x := v_0.Args[0] 15009 if !(lc < 32) { 15010 break 15011 } 15012 v.reset(OpARM64SBFIZ) 15013 v.AuxInt = armBFAuxInt(lc, 32-lc) 15014 v.AddArg(x) 15015 return true 15016 } 15017 return false 15018 } 15019 func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool { 15020 b := v.Block 15021 config := b.Func.Config 15022 // match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem) 15023 // result: (FMOVSstore [off] {sym} ptr val mem) 15024 for { 15025 off := v.AuxInt 15026 sym := v.Aux 15027 mem := v.Args[2] 15028 ptr := v.Args[0] 15029 v_1 := v.Args[1] 15030 if v_1.Op != OpARM64FMOVSfpgp { 15031 break 15032 } 15033 val := v_1.Args[0] 15034 v.reset(OpARM64FMOVSstore) 15035 v.AuxInt = off 15036 v.Aux = sym 15037 v.AddArg(ptr) 15038 v.AddArg(val) 15039 v.AddArg(mem) 15040 return true 15041 } 15042 // match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem) 15043 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15044 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 15045 for { 15046 off1 := v.AuxInt 15047 sym := v.Aux 15048 mem := v.Args[2] 15049 v_0 := v.Args[0] 15050 if v_0.Op != OpARM64ADDconst { 15051 break 15052 } 15053 off2 := v_0.AuxInt 15054 ptr := v_0.Args[0] 15055 val := v.Args[1] 15056 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15057 break 15058 } 15059 v.reset(OpARM64MOVWstore) 15060 v.AuxInt = off1 + off2 15061 v.Aux = sym 15062 v.AddArg(ptr) 15063 v.AddArg(val) 15064 v.AddArg(mem) 15065 return true 15066 } 15067 // match: (MOVWstore [off] {sym} (ADD ptr idx) val mem) 15068 // cond: off == 0 && sym == nil 15069 
// result: (MOVWstoreidx ptr idx val mem) 15070 for { 15071 off := v.AuxInt 15072 sym := v.Aux 15073 mem := v.Args[2] 15074 v_0 := v.Args[0] 15075 if v_0.Op != OpARM64ADD { 15076 break 15077 } 15078 idx := v_0.Args[1] 15079 ptr := v_0.Args[0] 15080 val := v.Args[1] 15081 if !(off == 0 && sym == nil) { 15082 break 15083 } 15084 v.reset(OpARM64MOVWstoreidx) 15085 v.AddArg(ptr) 15086 v.AddArg(idx) 15087 v.AddArg(val) 15088 v.AddArg(mem) 15089 return true 15090 } 15091 // match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem) 15092 // cond: off == 0 && sym == nil 15093 // result: (MOVWstoreidx4 ptr idx val mem) 15094 for { 15095 off := v.AuxInt 15096 sym := v.Aux 15097 mem := v.Args[2] 15098 v_0 := v.Args[0] 15099 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 15100 break 15101 } 15102 idx := v_0.Args[1] 15103 ptr := v_0.Args[0] 15104 val := v.Args[1] 15105 if !(off == 0 && sym == nil) { 15106 break 15107 } 15108 v.reset(OpARM64MOVWstoreidx4) 15109 v.AddArg(ptr) 15110 v.AddArg(idx) 15111 v.AddArg(val) 15112 v.AddArg(mem) 15113 return true 15114 } 15115 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 15116 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15117 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 15118 for { 15119 off1 := v.AuxInt 15120 sym1 := v.Aux 15121 mem := v.Args[2] 15122 v_0 := v.Args[0] 15123 if v_0.Op != OpARM64MOVDaddr { 15124 break 15125 } 15126 off2 := v_0.AuxInt 15127 sym2 := v_0.Aux 15128 ptr := v_0.Args[0] 15129 val := v.Args[1] 15130 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15131 break 15132 } 15133 v.reset(OpARM64MOVWstore) 15134 v.AuxInt = off1 + off2 15135 v.Aux = mergeSym(sym1, sym2) 15136 v.AddArg(ptr) 15137 v.AddArg(val) 15138 v.AddArg(mem) 15139 return true 15140 } 15141 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 15142 // result: (MOVWstorezero [off] 
{sym} ptr mem) 15143 for { 15144 off := v.AuxInt 15145 sym := v.Aux 15146 mem := v.Args[2] 15147 ptr := v.Args[0] 15148 v_1 := v.Args[1] 15149 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 15150 break 15151 } 15152 v.reset(OpARM64MOVWstorezero) 15153 v.AuxInt = off 15154 v.Aux = sym 15155 v.AddArg(ptr) 15156 v.AddArg(mem) 15157 return true 15158 } 15159 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 15160 // result: (MOVWstore [off] {sym} ptr x mem) 15161 for { 15162 off := v.AuxInt 15163 sym := v.Aux 15164 mem := v.Args[2] 15165 ptr := v.Args[0] 15166 v_1 := v.Args[1] 15167 if v_1.Op != OpARM64MOVWreg { 15168 break 15169 } 15170 x := v_1.Args[0] 15171 v.reset(OpARM64MOVWstore) 15172 v.AuxInt = off 15173 v.Aux = sym 15174 v.AddArg(ptr) 15175 v.AddArg(x) 15176 v.AddArg(mem) 15177 return true 15178 } 15179 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 15180 // result: (MOVWstore [off] {sym} ptr x mem) 15181 for { 15182 off := v.AuxInt 15183 sym := v.Aux 15184 mem := v.Args[2] 15185 ptr := v.Args[0] 15186 v_1 := v.Args[1] 15187 if v_1.Op != OpARM64MOVWUreg { 15188 break 15189 } 15190 x := v_1.Args[0] 15191 v.reset(OpARM64MOVWstore) 15192 v.AuxInt = off 15193 v.Aux = sym 15194 v.AddArg(ptr) 15195 v.AddArg(x) 15196 v.AddArg(mem) 15197 return true 15198 } 15199 // match: (MOVWstore [i] {s} ptr0 (SRLconst [32] w) x:(MOVWstore [i-4] {s} ptr1 w mem)) 15200 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 15201 // result: (MOVDstore [i-4] {s} ptr0 w mem) 15202 for { 15203 i := v.AuxInt 15204 s := v.Aux 15205 _ = v.Args[2] 15206 ptr0 := v.Args[0] 15207 v_1 := v.Args[1] 15208 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 32 { 15209 break 15210 } 15211 w := v_1.Args[0] 15212 x := v.Args[2] 15213 if x.Op != OpARM64MOVWstore || x.AuxInt != i-4 || x.Aux != s { 15214 break 15215 } 15216 mem := x.Args[2] 15217 ptr1 := x.Args[0] 15218 if w != x.Args[1] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 15219 break 15220 } 15221 
v.reset(OpARM64MOVDstore) 15222 v.AuxInt = i - 4 15223 v.Aux = s 15224 v.AddArg(ptr0) 15225 v.AddArg(w) 15226 v.AddArg(mem) 15227 return true 15228 } 15229 // match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx ptr1 idx1 w mem)) 15230 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15231 // result: (MOVDstoreidx ptr1 idx1 w mem) 15232 for { 15233 if v.AuxInt != 4 { 15234 break 15235 } 15236 s := v.Aux 15237 _ = v.Args[2] 15238 v_0 := v.Args[0] 15239 if v_0.Op != OpARM64ADD { 15240 break 15241 } 15242 idx0 := v_0.Args[1] 15243 ptr0 := v_0.Args[0] 15244 v_1 := v.Args[1] 15245 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != 32 { 15246 break 15247 } 15248 w := v_1.Args[0] 15249 x := v.Args[2] 15250 if x.Op != OpARM64MOVWstoreidx { 15251 break 15252 } 15253 mem := x.Args[3] 15254 ptr1 := x.Args[0] 15255 idx1 := x.Args[1] 15256 if w != x.Args[2] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15257 break 15258 } 15259 v.reset(OpARM64MOVDstoreidx) 15260 v.AddArg(ptr1) 15261 v.AddArg(idx1) 15262 v.AddArg(w) 15263 v.AddArg(mem) 15264 return true 15265 } 15266 return false 15267 } 15268 func rewriteValueARM64_OpARM64MOVWstore_10(v *Value) bool { 15269 b := v.Block 15270 // match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem)) 15271 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15272 // result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem) 15273 for { 15274 if v.AuxInt != 4 { 15275 break 15276 } 15277 s := v.Aux 15278 _ = v.Args[2] 15279 v_0 := v.Args[0] 15280 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 15281 break 15282 } 15283 idx0 := v_0.Args[1] 15284 ptr0 := v_0.Args[0] 15285 v_1 := v.Args[1] 15286 if v_1.Op != OpARM64SRLconst || 
v_1.AuxInt != 32 { 15287 break 15288 } 15289 w := v_1.Args[0] 15290 x := v.Args[2] 15291 if x.Op != OpARM64MOVWstoreidx4 { 15292 break 15293 } 15294 mem := x.Args[3] 15295 ptr1 := x.Args[0] 15296 idx1 := x.Args[1] 15297 if w != x.Args[2] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15298 break 15299 } 15300 v.reset(OpARM64MOVDstoreidx) 15301 v.AddArg(ptr1) 15302 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15303 v0.AuxInt = 2 15304 v0.AddArg(idx1) 15305 v.AddArg(v0) 15306 v.AddArg(w) 15307 v.AddArg(mem) 15308 return true 15309 } 15310 // match: (MOVWstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVWstore [i-4] {s} ptr1 w0:(SRLconst [j-32] w) mem)) 15311 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 15312 // result: (MOVDstore [i-4] {s} ptr0 w0 mem) 15313 for { 15314 i := v.AuxInt 15315 s := v.Aux 15316 _ = v.Args[2] 15317 ptr0 := v.Args[0] 15318 v_1 := v.Args[1] 15319 if v_1.Op != OpARM64SRLconst { 15320 break 15321 } 15322 j := v_1.AuxInt 15323 w := v_1.Args[0] 15324 x := v.Args[2] 15325 if x.Op != OpARM64MOVWstore || x.AuxInt != i-4 || x.Aux != s { 15326 break 15327 } 15328 mem := x.Args[2] 15329 ptr1 := x.Args[0] 15330 w0 := x.Args[1] 15331 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-32 || w != w0.Args[0] || !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 15332 break 15333 } 15334 v.reset(OpARM64MOVDstore) 15335 v.AuxInt = i - 4 15336 v.Aux = s 15337 v.AddArg(ptr0) 15338 v.AddArg(w0) 15339 v.AddArg(mem) 15340 return true 15341 } 15342 // match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx ptr1 idx1 w0:(SRLconst [j-32] w) mem)) 15343 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15344 // result: (MOVDstoreidx ptr1 idx1 w0 mem) 15345 for { 15346 if v.AuxInt != 4 { 15347 break 15348 } 15349 s := v.Aux 15350 _ = v.Args[2] 15351 v_0 := v.Args[0] 15352 if v_0.Op != 
OpARM64ADD { 15353 break 15354 } 15355 idx0 := v_0.Args[1] 15356 ptr0 := v_0.Args[0] 15357 v_1 := v.Args[1] 15358 if v_1.Op != OpARM64SRLconst { 15359 break 15360 } 15361 j := v_1.AuxInt 15362 w := v_1.Args[0] 15363 x := v.Args[2] 15364 if x.Op != OpARM64MOVWstoreidx { 15365 break 15366 } 15367 mem := x.Args[3] 15368 ptr1 := x.Args[0] 15369 idx1 := x.Args[1] 15370 w0 := x.Args[2] 15371 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15372 break 15373 } 15374 v.reset(OpARM64MOVDstoreidx) 15375 v.AddArg(ptr1) 15376 v.AddArg(idx1) 15377 v.AddArg(w0) 15378 v.AddArg(mem) 15379 return true 15380 } 15381 // match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx4 ptr1 idx1 w0:(SRLconst [j-32] w) mem)) 15382 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15383 // result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w0 mem) 15384 for { 15385 if v.AuxInt != 4 { 15386 break 15387 } 15388 s := v.Aux 15389 _ = v.Args[2] 15390 v_0 := v.Args[0] 15391 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 15392 break 15393 } 15394 idx0 := v_0.Args[1] 15395 ptr0 := v_0.Args[0] 15396 v_1 := v.Args[1] 15397 if v_1.Op != OpARM64SRLconst { 15398 break 15399 } 15400 j := v_1.AuxInt 15401 w := v_1.Args[0] 15402 x := v.Args[2] 15403 if x.Op != OpARM64MOVWstoreidx4 { 15404 break 15405 } 15406 mem := x.Args[3] 15407 ptr1 := x.Args[0] 15408 idx1 := x.Args[1] 15409 w0 := x.Args[2] 15410 if w0.Op != OpARM64SRLconst || w0.AuxInt != j-32 || w != w0.Args[0] || !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15411 break 15412 } 15413 v.reset(OpARM64MOVDstoreidx) 15414 v.AddArg(ptr1) 15415 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15416 v0.AuxInt = 2 15417 v0.AddArg(idx1) 15418 v.AddArg(v0) 15419 
v.AddArg(w0) 15420 v.AddArg(mem) 15421 return true 15422 } 15423 return false 15424 } 15425 func rewriteValueARM64_OpARM64MOVWstoreidx_0(v *Value) bool { 15426 // match: (MOVWstoreidx ptr (MOVDconst [c]) val mem) 15427 // result: (MOVWstore [c] ptr val mem) 15428 for { 15429 mem := v.Args[3] 15430 ptr := v.Args[0] 15431 v_1 := v.Args[1] 15432 if v_1.Op != OpARM64MOVDconst { 15433 break 15434 } 15435 c := v_1.AuxInt 15436 val := v.Args[2] 15437 v.reset(OpARM64MOVWstore) 15438 v.AuxInt = c 15439 v.AddArg(ptr) 15440 v.AddArg(val) 15441 v.AddArg(mem) 15442 return true 15443 } 15444 // match: (MOVWstoreidx (MOVDconst [c]) idx val mem) 15445 // result: (MOVWstore [c] idx val mem) 15446 for { 15447 mem := v.Args[3] 15448 v_0 := v.Args[0] 15449 if v_0.Op != OpARM64MOVDconst { 15450 break 15451 } 15452 c := v_0.AuxInt 15453 idx := v.Args[1] 15454 val := v.Args[2] 15455 v.reset(OpARM64MOVWstore) 15456 v.AuxInt = c 15457 v.AddArg(idx) 15458 v.AddArg(val) 15459 v.AddArg(mem) 15460 return true 15461 } 15462 // match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem) 15463 // result: (MOVWstoreidx4 ptr idx val mem) 15464 for { 15465 mem := v.Args[3] 15466 ptr := v.Args[0] 15467 v_1 := v.Args[1] 15468 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 2 { 15469 break 15470 } 15471 idx := v_1.Args[0] 15472 val := v.Args[2] 15473 v.reset(OpARM64MOVWstoreidx4) 15474 v.AddArg(ptr) 15475 v.AddArg(idx) 15476 v.AddArg(val) 15477 v.AddArg(mem) 15478 return true 15479 } 15480 // match: (MOVWstoreidx (SLLconst [2] idx) ptr val mem) 15481 // result: (MOVWstoreidx4 ptr idx val mem) 15482 for { 15483 mem := v.Args[3] 15484 v_0 := v.Args[0] 15485 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 2 { 15486 break 15487 } 15488 idx := v_0.Args[0] 15489 ptr := v.Args[1] 15490 val := v.Args[2] 15491 v.reset(OpARM64MOVWstoreidx4) 15492 v.AddArg(ptr) 15493 v.AddArg(idx) 15494 v.AddArg(val) 15495 v.AddArg(mem) 15496 return true 15497 } 15498 // match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem) 15499 // result: 
(MOVWstorezeroidx ptr idx mem) 15500 for { 15501 mem := v.Args[3] 15502 ptr := v.Args[0] 15503 idx := v.Args[1] 15504 v_2 := v.Args[2] 15505 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 15506 break 15507 } 15508 v.reset(OpARM64MOVWstorezeroidx) 15509 v.AddArg(ptr) 15510 v.AddArg(idx) 15511 v.AddArg(mem) 15512 return true 15513 } 15514 // match: (MOVWstoreidx ptr idx (MOVWreg x) mem) 15515 // result: (MOVWstoreidx ptr idx x mem) 15516 for { 15517 mem := v.Args[3] 15518 ptr := v.Args[0] 15519 idx := v.Args[1] 15520 v_2 := v.Args[2] 15521 if v_2.Op != OpARM64MOVWreg { 15522 break 15523 } 15524 x := v_2.Args[0] 15525 v.reset(OpARM64MOVWstoreidx) 15526 v.AddArg(ptr) 15527 v.AddArg(idx) 15528 v.AddArg(x) 15529 v.AddArg(mem) 15530 return true 15531 } 15532 // match: (MOVWstoreidx ptr idx (MOVWUreg x) mem) 15533 // result: (MOVWstoreidx ptr idx x mem) 15534 for { 15535 mem := v.Args[3] 15536 ptr := v.Args[0] 15537 idx := v.Args[1] 15538 v_2 := v.Args[2] 15539 if v_2.Op != OpARM64MOVWUreg { 15540 break 15541 } 15542 x := v_2.Args[0] 15543 v.reset(OpARM64MOVWstoreidx) 15544 v.AddArg(ptr) 15545 v.AddArg(idx) 15546 v.AddArg(x) 15547 v.AddArg(mem) 15548 return true 15549 } 15550 // match: (MOVWstoreidx ptr (ADDconst [4] idx) (SRLconst [32] w) x:(MOVWstoreidx ptr idx w mem)) 15551 // cond: x.Uses == 1 && clobber(x) 15552 // result: (MOVDstoreidx ptr idx w mem) 15553 for { 15554 _ = v.Args[3] 15555 ptr := v.Args[0] 15556 v_1 := v.Args[1] 15557 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 4 { 15558 break 15559 } 15560 idx := v_1.Args[0] 15561 v_2 := v.Args[2] 15562 if v_2.Op != OpARM64SRLconst || v_2.AuxInt != 32 { 15563 break 15564 } 15565 w := v_2.Args[0] 15566 x := v.Args[3] 15567 if x.Op != OpARM64MOVWstoreidx { 15568 break 15569 } 15570 mem := x.Args[3] 15571 if ptr != x.Args[0] || idx != x.Args[1] || w != x.Args[2] || !(x.Uses == 1 && clobber(x)) { 15572 break 15573 } 15574 v.reset(OpARM64MOVDstoreidx) 15575 v.AddArg(ptr) 15576 v.AddArg(idx) 15577 v.AddArg(w) 15578 
v.AddArg(mem) 15579 return true 15580 } 15581 return false 15582 } 15583 func rewriteValueARM64_OpARM64MOVWstoreidx4_0(v *Value) bool { 15584 // match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem) 15585 // result: (MOVWstore [c<<2] ptr val mem) 15586 for { 15587 mem := v.Args[3] 15588 ptr := v.Args[0] 15589 v_1 := v.Args[1] 15590 if v_1.Op != OpARM64MOVDconst { 15591 break 15592 } 15593 c := v_1.AuxInt 15594 val := v.Args[2] 15595 v.reset(OpARM64MOVWstore) 15596 v.AuxInt = c << 2 15597 v.AddArg(ptr) 15598 v.AddArg(val) 15599 v.AddArg(mem) 15600 return true 15601 } 15602 // match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem) 15603 // result: (MOVWstorezeroidx4 ptr idx mem) 15604 for { 15605 mem := v.Args[3] 15606 ptr := v.Args[0] 15607 idx := v.Args[1] 15608 v_2 := v.Args[2] 15609 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 15610 break 15611 } 15612 v.reset(OpARM64MOVWstorezeroidx4) 15613 v.AddArg(ptr) 15614 v.AddArg(idx) 15615 v.AddArg(mem) 15616 return true 15617 } 15618 // match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem) 15619 // result: (MOVWstoreidx4 ptr idx x mem) 15620 for { 15621 mem := v.Args[3] 15622 ptr := v.Args[0] 15623 idx := v.Args[1] 15624 v_2 := v.Args[2] 15625 if v_2.Op != OpARM64MOVWreg { 15626 break 15627 } 15628 x := v_2.Args[0] 15629 v.reset(OpARM64MOVWstoreidx4) 15630 v.AddArg(ptr) 15631 v.AddArg(idx) 15632 v.AddArg(x) 15633 v.AddArg(mem) 15634 return true 15635 } 15636 // match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem) 15637 // result: (MOVWstoreidx4 ptr idx x mem) 15638 for { 15639 mem := v.Args[3] 15640 ptr := v.Args[0] 15641 idx := v.Args[1] 15642 v_2 := v.Args[2] 15643 if v_2.Op != OpARM64MOVWUreg { 15644 break 15645 } 15646 x := v_2.Args[0] 15647 v.reset(OpARM64MOVWstoreidx4) 15648 v.AddArg(ptr) 15649 v.AddArg(idx) 15650 v.AddArg(x) 15651 v.AddArg(mem) 15652 return true 15653 } 15654 return false 15655 } 15656 func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool { 15657 b := v.Block 15658 config := b.Func.Config 15659 // 
match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 15660 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15661 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 15662 for { 15663 off1 := v.AuxInt 15664 sym := v.Aux 15665 mem := v.Args[1] 15666 v_0 := v.Args[0] 15667 if v_0.Op != OpARM64ADDconst { 15668 break 15669 } 15670 off2 := v_0.AuxInt 15671 ptr := v_0.Args[0] 15672 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15673 break 15674 } 15675 v.reset(OpARM64MOVWstorezero) 15676 v.AuxInt = off1 + off2 15677 v.Aux = sym 15678 v.AddArg(ptr) 15679 v.AddArg(mem) 15680 return true 15681 } 15682 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 15683 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15684 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 15685 for { 15686 off1 := v.AuxInt 15687 sym1 := v.Aux 15688 mem := v.Args[1] 15689 v_0 := v.Args[0] 15690 if v_0.Op != OpARM64MOVDaddr { 15691 break 15692 } 15693 off2 := v_0.AuxInt 15694 sym2 := v_0.Aux 15695 ptr := v_0.Args[0] 15696 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15697 break 15698 } 15699 v.reset(OpARM64MOVWstorezero) 15700 v.AuxInt = off1 + off2 15701 v.Aux = mergeSym(sym1, sym2) 15702 v.AddArg(ptr) 15703 v.AddArg(mem) 15704 return true 15705 } 15706 // match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem) 15707 // cond: off == 0 && sym == nil 15708 // result: (MOVWstorezeroidx ptr idx mem) 15709 for { 15710 off := v.AuxInt 15711 sym := v.Aux 15712 mem := v.Args[1] 15713 v_0 := v.Args[0] 15714 if v_0.Op != OpARM64ADD { 15715 break 15716 } 15717 idx := v_0.Args[1] 15718 ptr := v_0.Args[0] 15719 if !(off == 0 && sym == nil) { 15720 break 15721 } 15722 v.reset(OpARM64MOVWstorezeroidx) 15723 v.AddArg(ptr) 15724 v.AddArg(idx) 15725 v.AddArg(mem) 15726 return true 15727 } 15728 // match: 
(MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem) 15729 // cond: off == 0 && sym == nil 15730 // result: (MOVWstorezeroidx4 ptr idx mem) 15731 for { 15732 off := v.AuxInt 15733 sym := v.Aux 15734 mem := v.Args[1] 15735 v_0 := v.Args[0] 15736 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 15737 break 15738 } 15739 idx := v_0.Args[1] 15740 ptr := v_0.Args[0] 15741 if !(off == 0 && sym == nil) { 15742 break 15743 } 15744 v.reset(OpARM64MOVWstorezeroidx4) 15745 v.AddArg(ptr) 15746 v.AddArg(idx) 15747 v.AddArg(mem) 15748 return true 15749 } 15750 // match: (MOVWstorezero [i] {s} ptr0 x:(MOVWstorezero [j] {s} ptr1 mem)) 15751 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,4) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 15752 // result: (MOVDstorezero [min(i,j)] {s} ptr0 mem) 15753 for { 15754 i := v.AuxInt 15755 s := v.Aux 15756 _ = v.Args[1] 15757 ptr0 := v.Args[0] 15758 x := v.Args[1] 15759 if x.Op != OpARM64MOVWstorezero { 15760 break 15761 } 15762 j := x.AuxInt 15763 if x.Aux != s { 15764 break 15765 } 15766 mem := x.Args[1] 15767 ptr1 := x.Args[0] 15768 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 4) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 15769 break 15770 } 15771 v.reset(OpARM64MOVDstorezero) 15772 v.AuxInt = min(i, j) 15773 v.Aux = s 15774 v.AddArg(ptr0) 15775 v.AddArg(mem) 15776 return true 15777 } 15778 // match: (MOVWstorezero [4] {s} (ADD ptr0 idx0) x:(MOVWstorezeroidx ptr1 idx1 mem)) 15779 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15780 // result: (MOVDstorezeroidx ptr1 idx1 mem) 15781 for { 15782 if v.AuxInt != 4 { 15783 break 15784 } 15785 s := v.Aux 15786 _ = v.Args[1] 15787 v_0 := v.Args[0] 15788 if v_0.Op != OpARM64ADD { 15789 break 15790 } 15791 idx0 := v_0.Args[1] 15792 ptr0 := v_0.Args[0] 15793 x := v.Args[1] 15794 if x.Op != OpARM64MOVWstorezeroidx { 15795 break 15796 } 15797 mem := x.Args[2] 
15798 ptr1 := x.Args[0] 15799 idx1 := x.Args[1] 15800 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15801 break 15802 } 15803 v.reset(OpARM64MOVDstorezeroidx) 15804 v.AddArg(ptr1) 15805 v.AddArg(idx1) 15806 v.AddArg(mem) 15807 return true 15808 } 15809 // match: (MOVWstorezero [4] {s} (ADDshiftLL [2] ptr0 idx0) x:(MOVWstorezeroidx4 ptr1 idx1 mem)) 15810 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15811 // result: (MOVDstorezeroidx ptr1 (SLLconst <idx1.Type> [2] idx1) mem) 15812 for { 15813 if v.AuxInt != 4 { 15814 break 15815 } 15816 s := v.Aux 15817 _ = v.Args[1] 15818 v_0 := v.Args[0] 15819 if v_0.Op != OpARM64ADDshiftLL || v_0.AuxInt != 2 { 15820 break 15821 } 15822 idx0 := v_0.Args[1] 15823 ptr0 := v_0.Args[0] 15824 x := v.Args[1] 15825 if x.Op != OpARM64MOVWstorezeroidx4 { 15826 break 15827 } 15828 mem := x.Args[2] 15829 ptr1 := x.Args[0] 15830 idx1 := x.Args[1] 15831 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15832 break 15833 } 15834 v.reset(OpARM64MOVDstorezeroidx) 15835 v.AddArg(ptr1) 15836 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15837 v0.AuxInt = 2 15838 v0.AddArg(idx1) 15839 v.AddArg(v0) 15840 v.AddArg(mem) 15841 return true 15842 } 15843 return false 15844 } 15845 func rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v *Value) bool { 15846 // match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem) 15847 // result: (MOVWstorezero [c] ptr mem) 15848 for { 15849 mem := v.Args[2] 15850 ptr := v.Args[0] 15851 v_1 := v.Args[1] 15852 if v_1.Op != OpARM64MOVDconst { 15853 break 15854 } 15855 c := v_1.AuxInt 15856 v.reset(OpARM64MOVWstorezero) 15857 v.AuxInt = c 15858 v.AddArg(ptr) 15859 v.AddArg(mem) 15860 return true 15861 } 15862 // match: (MOVWstorezeroidx (MOVDconst [c]) idx mem) 15863 // result: (MOVWstorezero [c] idx mem) 15864 for { 15865 mem := 
v.Args[2] 15866 v_0 := v.Args[0] 15867 if v_0.Op != OpARM64MOVDconst { 15868 break 15869 } 15870 c := v_0.AuxInt 15871 idx := v.Args[1] 15872 v.reset(OpARM64MOVWstorezero) 15873 v.AuxInt = c 15874 v.AddArg(idx) 15875 v.AddArg(mem) 15876 return true 15877 } 15878 // match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem) 15879 // result: (MOVWstorezeroidx4 ptr idx mem) 15880 for { 15881 mem := v.Args[2] 15882 ptr := v.Args[0] 15883 v_1 := v.Args[1] 15884 if v_1.Op != OpARM64SLLconst || v_1.AuxInt != 2 { 15885 break 15886 } 15887 idx := v_1.Args[0] 15888 v.reset(OpARM64MOVWstorezeroidx4) 15889 v.AddArg(ptr) 15890 v.AddArg(idx) 15891 v.AddArg(mem) 15892 return true 15893 } 15894 // match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem) 15895 // result: (MOVWstorezeroidx4 ptr idx mem) 15896 for { 15897 mem := v.Args[2] 15898 v_0 := v.Args[0] 15899 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 2 { 15900 break 15901 } 15902 idx := v_0.Args[0] 15903 ptr := v.Args[1] 15904 v.reset(OpARM64MOVWstorezeroidx4) 15905 v.AddArg(ptr) 15906 v.AddArg(idx) 15907 v.AddArg(mem) 15908 return true 15909 } 15910 // match: (MOVWstorezeroidx ptr (ADDconst [4] idx) x:(MOVWstorezeroidx ptr idx mem)) 15911 // cond: x.Uses == 1 && clobber(x) 15912 // result: (MOVDstorezeroidx ptr idx mem) 15913 for { 15914 _ = v.Args[2] 15915 ptr := v.Args[0] 15916 v_1 := v.Args[1] 15917 if v_1.Op != OpARM64ADDconst || v_1.AuxInt != 4 { 15918 break 15919 } 15920 idx := v_1.Args[0] 15921 x := v.Args[2] 15922 if x.Op != OpARM64MOVWstorezeroidx { 15923 break 15924 } 15925 mem := x.Args[2] 15926 if ptr != x.Args[0] || idx != x.Args[1] || !(x.Uses == 1 && clobber(x)) { 15927 break 15928 } 15929 v.reset(OpARM64MOVDstorezeroidx) 15930 v.AddArg(ptr) 15931 v.AddArg(idx) 15932 v.AddArg(mem) 15933 return true 15934 } 15935 return false 15936 } 15937 func rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v *Value) bool { 15938 // match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem) 15939 // result: (MOVWstorezero [c<<2] ptr mem) 15940 
for { 15941 mem := v.Args[2] 15942 ptr := v.Args[0] 15943 v_1 := v.Args[1] 15944 if v_1.Op != OpARM64MOVDconst { 15945 break 15946 } 15947 c := v_1.AuxInt 15948 v.reset(OpARM64MOVWstorezero) 15949 v.AuxInt = c << 2 15950 v.AddArg(ptr) 15951 v.AddArg(mem) 15952 return true 15953 } 15954 return false 15955 } 15956 func rewriteValueARM64_OpARM64MSUB_0(v *Value) bool { 15957 b := v.Block 15958 // match: (MSUB a x (MOVDconst [-1])) 15959 // result: (ADD a x) 15960 for { 15961 _ = v.Args[2] 15962 a := v.Args[0] 15963 x := v.Args[1] 15964 v_2 := v.Args[2] 15965 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != -1 { 15966 break 15967 } 15968 v.reset(OpARM64ADD) 15969 v.AddArg(a) 15970 v.AddArg(x) 15971 return true 15972 } 15973 // match: (MSUB a _ (MOVDconst [0])) 15974 // result: a 15975 for { 15976 _ = v.Args[2] 15977 a := v.Args[0] 15978 v_2 := v.Args[2] 15979 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 { 15980 break 15981 } 15982 v.reset(OpCopy) 15983 v.Type = a.Type 15984 v.AddArg(a) 15985 return true 15986 } 15987 // match: (MSUB a x (MOVDconst [1])) 15988 // result: (SUB a x) 15989 for { 15990 _ = v.Args[2] 15991 a := v.Args[0] 15992 x := v.Args[1] 15993 v_2 := v.Args[2] 15994 if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 1 { 15995 break 15996 } 15997 v.reset(OpARM64SUB) 15998 v.AddArg(a) 15999 v.AddArg(x) 16000 return true 16001 } 16002 // match: (MSUB a x (MOVDconst [c])) 16003 // cond: isPowerOfTwo(c) 16004 // result: (SUBshiftLL a x [log2(c)]) 16005 for { 16006 _ = v.Args[2] 16007 a := v.Args[0] 16008 x := v.Args[1] 16009 v_2 := v.Args[2] 16010 if v_2.Op != OpARM64MOVDconst { 16011 break 16012 } 16013 c := v_2.AuxInt 16014 if !(isPowerOfTwo(c)) { 16015 break 16016 } 16017 v.reset(OpARM64SUBshiftLL) 16018 v.AuxInt = log2(c) 16019 v.AddArg(a) 16020 v.AddArg(x) 16021 return true 16022 } 16023 // match: (MSUB a x (MOVDconst [c])) 16024 // cond: isPowerOfTwo(c-1) && c>=3 16025 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 16026 for { 16027 _ = v.Args[2] 
16028 a := v.Args[0] 16029 x := v.Args[1] 16030 v_2 := v.Args[2] 16031 if v_2.Op != OpARM64MOVDconst { 16032 break 16033 } 16034 c := v_2.AuxInt 16035 if !(isPowerOfTwo(c-1) && c >= 3) { 16036 break 16037 } 16038 v.reset(OpARM64SUB) 16039 v.AddArg(a) 16040 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16041 v0.AuxInt = log2(c - 1) 16042 v0.AddArg(x) 16043 v0.AddArg(x) 16044 v.AddArg(v0) 16045 return true 16046 } 16047 // match: (MSUB a x (MOVDconst [c])) 16048 // cond: isPowerOfTwo(c+1) && c>=7 16049 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 16050 for { 16051 _ = v.Args[2] 16052 a := v.Args[0] 16053 x := v.Args[1] 16054 v_2 := v.Args[2] 16055 if v_2.Op != OpARM64MOVDconst { 16056 break 16057 } 16058 c := v_2.AuxInt 16059 if !(isPowerOfTwo(c+1) && c >= 7) { 16060 break 16061 } 16062 v.reset(OpARM64ADD) 16063 v.AddArg(a) 16064 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16065 v0.AuxInt = log2(c + 1) 16066 v0.AddArg(x) 16067 v0.AddArg(x) 16068 v.AddArg(v0) 16069 return true 16070 } 16071 // match: (MSUB a x (MOVDconst [c])) 16072 // cond: c%3 == 0 && isPowerOfTwo(c/3) 16073 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 16074 for { 16075 _ = v.Args[2] 16076 a := v.Args[0] 16077 x := v.Args[1] 16078 v_2 := v.Args[2] 16079 if v_2.Op != OpARM64MOVDconst { 16080 break 16081 } 16082 c := v_2.AuxInt 16083 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 16084 break 16085 } 16086 v.reset(OpARM64ADDshiftLL) 16087 v.AuxInt = log2(c / 3) 16088 v.AddArg(a) 16089 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16090 v0.AuxInt = 2 16091 v0.AddArg(x) 16092 v0.AddArg(x) 16093 v.AddArg(v0) 16094 return true 16095 } 16096 // match: (MSUB a x (MOVDconst [c])) 16097 // cond: c%5 == 0 && isPowerOfTwo(c/5) 16098 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 16099 for { 16100 _ = v.Args[2] 16101 a := v.Args[0] 16102 x := v.Args[1] 16103 v_2 := v.Args[2] 16104 if v_2.Op != OpARM64MOVDconst { 16105 break 16106 } 16107 c := 
v_2.AuxInt 16108 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 16109 break 16110 } 16111 v.reset(OpARM64SUBshiftLL) 16112 v.AuxInt = log2(c / 5) 16113 v.AddArg(a) 16114 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16115 v0.AuxInt = 2 16116 v0.AddArg(x) 16117 v0.AddArg(x) 16118 v.AddArg(v0) 16119 return true 16120 } 16121 // match: (MSUB a x (MOVDconst [c])) 16122 // cond: c%7 == 0 && isPowerOfTwo(c/7) 16123 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 16124 for { 16125 _ = v.Args[2] 16126 a := v.Args[0] 16127 x := v.Args[1] 16128 v_2 := v.Args[2] 16129 if v_2.Op != OpARM64MOVDconst { 16130 break 16131 } 16132 c := v_2.AuxInt 16133 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 16134 break 16135 } 16136 v.reset(OpARM64ADDshiftLL) 16137 v.AuxInt = log2(c / 7) 16138 v.AddArg(a) 16139 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16140 v0.AuxInt = 3 16141 v0.AddArg(x) 16142 v0.AddArg(x) 16143 v.AddArg(v0) 16144 return true 16145 } 16146 // match: (MSUB a x (MOVDconst [c])) 16147 // cond: c%9 == 0 && isPowerOfTwo(c/9) 16148 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 16149 for { 16150 _ = v.Args[2] 16151 a := v.Args[0] 16152 x := v.Args[1] 16153 v_2 := v.Args[2] 16154 if v_2.Op != OpARM64MOVDconst { 16155 break 16156 } 16157 c := v_2.AuxInt 16158 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 16159 break 16160 } 16161 v.reset(OpARM64SUBshiftLL) 16162 v.AuxInt = log2(c / 9) 16163 v.AddArg(a) 16164 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16165 v0.AuxInt = 3 16166 v0.AddArg(x) 16167 v0.AddArg(x) 16168 v.AddArg(v0) 16169 return true 16170 } 16171 return false 16172 } 16173 func rewriteValueARM64_OpARM64MSUB_10(v *Value) bool { 16174 b := v.Block 16175 // match: (MSUB a (MOVDconst [-1]) x) 16176 // result: (ADD a x) 16177 for { 16178 x := v.Args[2] 16179 a := v.Args[0] 16180 v_1 := v.Args[1] 16181 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != -1 { 16182 break 16183 } 16184 v.reset(OpARM64ADD) 16185 v.AddArg(a) 16186 v.AddArg(x) 
16187 return true 16188 } 16189 // match: (MSUB a (MOVDconst [0]) _) 16190 // result: a 16191 for { 16192 _ = v.Args[2] 16193 a := v.Args[0] 16194 v_1 := v.Args[1] 16195 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 16196 break 16197 } 16198 v.reset(OpCopy) 16199 v.Type = a.Type 16200 v.AddArg(a) 16201 return true 16202 } 16203 // match: (MSUB a (MOVDconst [1]) x) 16204 // result: (SUB a x) 16205 for { 16206 x := v.Args[2] 16207 a := v.Args[0] 16208 v_1 := v.Args[1] 16209 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 1 { 16210 break 16211 } 16212 v.reset(OpARM64SUB) 16213 v.AddArg(a) 16214 v.AddArg(x) 16215 return true 16216 } 16217 // match: (MSUB a (MOVDconst [c]) x) 16218 // cond: isPowerOfTwo(c) 16219 // result: (SUBshiftLL a x [log2(c)]) 16220 for { 16221 x := v.Args[2] 16222 a := v.Args[0] 16223 v_1 := v.Args[1] 16224 if v_1.Op != OpARM64MOVDconst { 16225 break 16226 } 16227 c := v_1.AuxInt 16228 if !(isPowerOfTwo(c)) { 16229 break 16230 } 16231 v.reset(OpARM64SUBshiftLL) 16232 v.AuxInt = log2(c) 16233 v.AddArg(a) 16234 v.AddArg(x) 16235 return true 16236 } 16237 // match: (MSUB a (MOVDconst [c]) x) 16238 // cond: isPowerOfTwo(c-1) && c>=3 16239 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 16240 for { 16241 x := v.Args[2] 16242 a := v.Args[0] 16243 v_1 := v.Args[1] 16244 if v_1.Op != OpARM64MOVDconst { 16245 break 16246 } 16247 c := v_1.AuxInt 16248 if !(isPowerOfTwo(c-1) && c >= 3) { 16249 break 16250 } 16251 v.reset(OpARM64SUB) 16252 v.AddArg(a) 16253 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16254 v0.AuxInt = log2(c - 1) 16255 v0.AddArg(x) 16256 v0.AddArg(x) 16257 v.AddArg(v0) 16258 return true 16259 } 16260 // match: (MSUB a (MOVDconst [c]) x) 16261 // cond: isPowerOfTwo(c+1) && c>=7 16262 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 16263 for { 16264 x := v.Args[2] 16265 a := v.Args[0] 16266 v_1 := v.Args[1] 16267 if v_1.Op != OpARM64MOVDconst { 16268 break 16269 } 16270 c := v_1.AuxInt 16271 if !(isPowerOfTwo(c+1) 
&& c >= 7) { 16272 break 16273 } 16274 v.reset(OpARM64ADD) 16275 v.AddArg(a) 16276 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16277 v0.AuxInt = log2(c + 1) 16278 v0.AddArg(x) 16279 v0.AddArg(x) 16280 v.AddArg(v0) 16281 return true 16282 } 16283 // match: (MSUB a (MOVDconst [c]) x) 16284 // cond: c%3 == 0 && isPowerOfTwo(c/3) 16285 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 16286 for { 16287 x := v.Args[2] 16288 a := v.Args[0] 16289 v_1 := v.Args[1] 16290 if v_1.Op != OpARM64MOVDconst { 16291 break 16292 } 16293 c := v_1.AuxInt 16294 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 16295 break 16296 } 16297 v.reset(OpARM64ADDshiftLL) 16298 v.AuxInt = log2(c / 3) 16299 v.AddArg(a) 16300 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16301 v0.AuxInt = 2 16302 v0.AddArg(x) 16303 v0.AddArg(x) 16304 v.AddArg(v0) 16305 return true 16306 } 16307 // match: (MSUB a (MOVDconst [c]) x) 16308 // cond: c%5 == 0 && isPowerOfTwo(c/5) 16309 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 16310 for { 16311 x := v.Args[2] 16312 a := v.Args[0] 16313 v_1 := v.Args[1] 16314 if v_1.Op != OpARM64MOVDconst { 16315 break 16316 } 16317 c := v_1.AuxInt 16318 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 16319 break 16320 } 16321 v.reset(OpARM64SUBshiftLL) 16322 v.AuxInt = log2(c / 5) 16323 v.AddArg(a) 16324 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16325 v0.AuxInt = 2 16326 v0.AddArg(x) 16327 v0.AddArg(x) 16328 v.AddArg(v0) 16329 return true 16330 } 16331 // match: (MSUB a (MOVDconst [c]) x) 16332 // cond: c%7 == 0 && isPowerOfTwo(c/7) 16333 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 16334 for { 16335 x := v.Args[2] 16336 a := v.Args[0] 16337 v_1 := v.Args[1] 16338 if v_1.Op != OpARM64MOVDconst { 16339 break 16340 } 16341 c := v_1.AuxInt 16342 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 16343 break 16344 } 16345 v.reset(OpARM64ADDshiftLL) 16346 v.AuxInt = log2(c / 7) 16347 v.AddArg(a) 16348 v0 := b.NewValue0(v.Pos, 
OpARM64SUBshiftLL, x.Type) 16349 v0.AuxInt = 3 16350 v0.AddArg(x) 16351 v0.AddArg(x) 16352 v.AddArg(v0) 16353 return true 16354 } 16355 // match: (MSUB a (MOVDconst [c]) x) 16356 // cond: c%9 == 0 && isPowerOfTwo(c/9) 16357 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 16358 for { 16359 x := v.Args[2] 16360 a := v.Args[0] 16361 v_1 := v.Args[1] 16362 if v_1.Op != OpARM64MOVDconst { 16363 break 16364 } 16365 c := v_1.AuxInt 16366 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 16367 break 16368 } 16369 v.reset(OpARM64SUBshiftLL) 16370 v.AuxInt = log2(c / 9) 16371 v.AddArg(a) 16372 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16373 v0.AuxInt = 3 16374 v0.AddArg(x) 16375 v0.AddArg(x) 16376 v.AddArg(v0) 16377 return true 16378 } 16379 return false 16380 } 16381 func rewriteValueARM64_OpARM64MSUB_20(v *Value) bool { 16382 b := v.Block 16383 // match: (MSUB (MOVDconst [c]) x y) 16384 // result: (ADDconst [c] (MNEG <x.Type> x y)) 16385 for { 16386 y := v.Args[2] 16387 v_0 := v.Args[0] 16388 if v_0.Op != OpARM64MOVDconst { 16389 break 16390 } 16391 c := v_0.AuxInt 16392 x := v.Args[1] 16393 v.reset(OpARM64ADDconst) 16394 v.AuxInt = c 16395 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type) 16396 v0.AddArg(x) 16397 v0.AddArg(y) 16398 v.AddArg(v0) 16399 return true 16400 } 16401 // match: (MSUB a (MOVDconst [c]) (MOVDconst [d])) 16402 // result: (SUBconst [c*d] a) 16403 for { 16404 _ = v.Args[2] 16405 a := v.Args[0] 16406 v_1 := v.Args[1] 16407 if v_1.Op != OpARM64MOVDconst { 16408 break 16409 } 16410 c := v_1.AuxInt 16411 v_2 := v.Args[2] 16412 if v_2.Op != OpARM64MOVDconst { 16413 break 16414 } 16415 d := v_2.AuxInt 16416 v.reset(OpARM64SUBconst) 16417 v.AuxInt = c * d 16418 v.AddArg(a) 16419 return true 16420 } 16421 return false 16422 } 16423 func rewriteValueARM64_OpARM64MSUBW_0(v *Value) bool { 16424 b := v.Block 16425 // match: (MSUBW a x (MOVDconst [c])) 16426 // cond: int32(c)==-1 16427 // result: (ADD a x) 16428 for { 16429 _ = v.Args[2] 16430 a := 
v.Args[0] 16431 x := v.Args[1] 16432 v_2 := v.Args[2] 16433 if v_2.Op != OpARM64MOVDconst { 16434 break 16435 } 16436 c := v_2.AuxInt 16437 if !(int32(c) == -1) { 16438 break 16439 } 16440 v.reset(OpARM64ADD) 16441 v.AddArg(a) 16442 v.AddArg(x) 16443 return true 16444 } 16445 // match: (MSUBW a _ (MOVDconst [c])) 16446 // cond: int32(c)==0 16447 // result: a 16448 for { 16449 _ = v.Args[2] 16450 a := v.Args[0] 16451 v_2 := v.Args[2] 16452 if v_2.Op != OpARM64MOVDconst { 16453 break 16454 } 16455 c := v_2.AuxInt 16456 if !(int32(c) == 0) { 16457 break 16458 } 16459 v.reset(OpCopy) 16460 v.Type = a.Type 16461 v.AddArg(a) 16462 return true 16463 } 16464 // match: (MSUBW a x (MOVDconst [c])) 16465 // cond: int32(c)==1 16466 // result: (SUB a x) 16467 for { 16468 _ = v.Args[2] 16469 a := v.Args[0] 16470 x := v.Args[1] 16471 v_2 := v.Args[2] 16472 if v_2.Op != OpARM64MOVDconst { 16473 break 16474 } 16475 c := v_2.AuxInt 16476 if !(int32(c) == 1) { 16477 break 16478 } 16479 v.reset(OpARM64SUB) 16480 v.AddArg(a) 16481 v.AddArg(x) 16482 return true 16483 } 16484 // match: (MSUBW a x (MOVDconst [c])) 16485 // cond: isPowerOfTwo(c) 16486 // result: (SUBshiftLL a x [log2(c)]) 16487 for { 16488 _ = v.Args[2] 16489 a := v.Args[0] 16490 x := v.Args[1] 16491 v_2 := v.Args[2] 16492 if v_2.Op != OpARM64MOVDconst { 16493 break 16494 } 16495 c := v_2.AuxInt 16496 if !(isPowerOfTwo(c)) { 16497 break 16498 } 16499 v.reset(OpARM64SUBshiftLL) 16500 v.AuxInt = log2(c) 16501 v.AddArg(a) 16502 v.AddArg(x) 16503 return true 16504 } 16505 // match: (MSUBW a x (MOVDconst [c])) 16506 // cond: isPowerOfTwo(c-1) && int32(c)>=3 16507 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 16508 for { 16509 _ = v.Args[2] 16510 a := v.Args[0] 16511 x := v.Args[1] 16512 v_2 := v.Args[2] 16513 if v_2.Op != OpARM64MOVDconst { 16514 break 16515 } 16516 c := v_2.AuxInt 16517 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 16518 break 16519 } 16520 v.reset(OpARM64SUB) 16521 v.AddArg(a) 16522 v0 := 
b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16523 v0.AuxInt = log2(c - 1) 16524 v0.AddArg(x) 16525 v0.AddArg(x) 16526 v.AddArg(v0) 16527 return true 16528 } 16529 // match: (MSUBW a x (MOVDconst [c])) 16530 // cond: isPowerOfTwo(c+1) && int32(c)>=7 16531 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 16532 for { 16533 _ = v.Args[2] 16534 a := v.Args[0] 16535 x := v.Args[1] 16536 v_2 := v.Args[2] 16537 if v_2.Op != OpARM64MOVDconst { 16538 break 16539 } 16540 c := v_2.AuxInt 16541 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 16542 break 16543 } 16544 v.reset(OpARM64ADD) 16545 v.AddArg(a) 16546 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16547 v0.AuxInt = log2(c + 1) 16548 v0.AddArg(x) 16549 v0.AddArg(x) 16550 v.AddArg(v0) 16551 return true 16552 } 16553 // match: (MSUBW a x (MOVDconst [c])) 16554 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 16555 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 16556 for { 16557 _ = v.Args[2] 16558 a := v.Args[0] 16559 x := v.Args[1] 16560 v_2 := v.Args[2] 16561 if v_2.Op != OpARM64MOVDconst { 16562 break 16563 } 16564 c := v_2.AuxInt 16565 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 16566 break 16567 } 16568 v.reset(OpARM64ADDshiftLL) 16569 v.AuxInt = log2(c / 3) 16570 v.AddArg(a) 16571 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16572 v0.AuxInt = 2 16573 v0.AddArg(x) 16574 v0.AddArg(x) 16575 v.AddArg(v0) 16576 return true 16577 } 16578 // match: (MSUBW a x (MOVDconst [c])) 16579 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 16580 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 16581 for { 16582 _ = v.Args[2] 16583 a := v.Args[0] 16584 x := v.Args[1] 16585 v_2 := v.Args[2] 16586 if v_2.Op != OpARM64MOVDconst { 16587 break 16588 } 16589 c := v_2.AuxInt 16590 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 16591 break 16592 } 16593 v.reset(OpARM64SUBshiftLL) 16594 v.AuxInt = log2(c / 5) 16595 v.AddArg(a) 16596 v0 := b.NewValue0(v.Pos, 
OpARM64ADDshiftLL, x.Type) 16597 v0.AuxInt = 2 16598 v0.AddArg(x) 16599 v0.AddArg(x) 16600 v.AddArg(v0) 16601 return true 16602 } 16603 // match: (MSUBW a x (MOVDconst [c])) 16604 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 16605 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 16606 for { 16607 _ = v.Args[2] 16608 a := v.Args[0] 16609 x := v.Args[1] 16610 v_2 := v.Args[2] 16611 if v_2.Op != OpARM64MOVDconst { 16612 break 16613 } 16614 c := v_2.AuxInt 16615 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 16616 break 16617 } 16618 v.reset(OpARM64ADDshiftLL) 16619 v.AuxInt = log2(c / 7) 16620 v.AddArg(a) 16621 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16622 v0.AuxInt = 3 16623 v0.AddArg(x) 16624 v0.AddArg(x) 16625 v.AddArg(v0) 16626 return true 16627 } 16628 // match: (MSUBW a x (MOVDconst [c])) 16629 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 16630 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 16631 for { 16632 _ = v.Args[2] 16633 a := v.Args[0] 16634 x := v.Args[1] 16635 v_2 := v.Args[2] 16636 if v_2.Op != OpARM64MOVDconst { 16637 break 16638 } 16639 c := v_2.AuxInt 16640 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 16641 break 16642 } 16643 v.reset(OpARM64SUBshiftLL) 16644 v.AuxInt = log2(c / 9) 16645 v.AddArg(a) 16646 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16647 v0.AuxInt = 3 16648 v0.AddArg(x) 16649 v0.AddArg(x) 16650 v.AddArg(v0) 16651 return true 16652 } 16653 return false 16654 } 16655 func rewriteValueARM64_OpARM64MSUBW_10(v *Value) bool { 16656 b := v.Block 16657 // match: (MSUBW a (MOVDconst [c]) x) 16658 // cond: int32(c)==-1 16659 // result: (ADD a x) 16660 for { 16661 x := v.Args[2] 16662 a := v.Args[0] 16663 v_1 := v.Args[1] 16664 if v_1.Op != OpARM64MOVDconst { 16665 break 16666 } 16667 c := v_1.AuxInt 16668 if !(int32(c) == -1) { 16669 break 16670 } 16671 v.reset(OpARM64ADD) 16672 v.AddArg(a) 16673 v.AddArg(x) 16674 return true 16675 } 16676 // match: (MSUBW 
a (MOVDconst [c]) _) 16677 // cond: int32(c)==0 16678 // result: a 16679 for { 16680 _ = v.Args[2] 16681 a := v.Args[0] 16682 v_1 := v.Args[1] 16683 if v_1.Op != OpARM64MOVDconst { 16684 break 16685 } 16686 c := v_1.AuxInt 16687 if !(int32(c) == 0) { 16688 break 16689 } 16690 v.reset(OpCopy) 16691 v.Type = a.Type 16692 v.AddArg(a) 16693 return true 16694 } 16695 // match: (MSUBW a (MOVDconst [c]) x) 16696 // cond: int32(c)==1 16697 // result: (SUB a x) 16698 for { 16699 x := v.Args[2] 16700 a := v.Args[0] 16701 v_1 := v.Args[1] 16702 if v_1.Op != OpARM64MOVDconst { 16703 break 16704 } 16705 c := v_1.AuxInt 16706 if !(int32(c) == 1) { 16707 break 16708 } 16709 v.reset(OpARM64SUB) 16710 v.AddArg(a) 16711 v.AddArg(x) 16712 return true 16713 } 16714 // match: (MSUBW a (MOVDconst [c]) x) 16715 // cond: isPowerOfTwo(c) 16716 // result: (SUBshiftLL a x [log2(c)]) 16717 for { 16718 x := v.Args[2] 16719 a := v.Args[0] 16720 v_1 := v.Args[1] 16721 if v_1.Op != OpARM64MOVDconst { 16722 break 16723 } 16724 c := v_1.AuxInt 16725 if !(isPowerOfTwo(c)) { 16726 break 16727 } 16728 v.reset(OpARM64SUBshiftLL) 16729 v.AuxInt = log2(c) 16730 v.AddArg(a) 16731 v.AddArg(x) 16732 return true 16733 } 16734 // match: (MSUBW a (MOVDconst [c]) x) 16735 // cond: isPowerOfTwo(c-1) && int32(c)>=3 16736 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 16737 for { 16738 x := v.Args[2] 16739 a := v.Args[0] 16740 v_1 := v.Args[1] 16741 if v_1.Op != OpARM64MOVDconst { 16742 break 16743 } 16744 c := v_1.AuxInt 16745 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 16746 break 16747 } 16748 v.reset(OpARM64SUB) 16749 v.AddArg(a) 16750 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16751 v0.AuxInt = log2(c - 1) 16752 v0.AddArg(x) 16753 v0.AddArg(x) 16754 v.AddArg(v0) 16755 return true 16756 } 16757 // match: (MSUBW a (MOVDconst [c]) x) 16758 // cond: isPowerOfTwo(c+1) && int32(c)>=7 16759 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 16760 for { 16761 x := v.Args[2] 16762 a := 
v.Args[0] 16763 v_1 := v.Args[1] 16764 if v_1.Op != OpARM64MOVDconst { 16765 break 16766 } 16767 c := v_1.AuxInt 16768 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 16769 break 16770 } 16771 v.reset(OpARM64ADD) 16772 v.AddArg(a) 16773 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16774 v0.AuxInt = log2(c + 1) 16775 v0.AddArg(x) 16776 v0.AddArg(x) 16777 v.AddArg(v0) 16778 return true 16779 } 16780 // match: (MSUBW a (MOVDconst [c]) x) 16781 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 16782 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 16783 for { 16784 x := v.Args[2] 16785 a := v.Args[0] 16786 v_1 := v.Args[1] 16787 if v_1.Op != OpARM64MOVDconst { 16788 break 16789 } 16790 c := v_1.AuxInt 16791 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 16792 break 16793 } 16794 v.reset(OpARM64ADDshiftLL) 16795 v.AuxInt = log2(c / 3) 16796 v.AddArg(a) 16797 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 16798 v0.AuxInt = 2 16799 v0.AddArg(x) 16800 v0.AddArg(x) 16801 v.AddArg(v0) 16802 return true 16803 } 16804 // match: (MSUBW a (MOVDconst [c]) x) 16805 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 16806 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 16807 for { 16808 x := v.Args[2] 16809 a := v.Args[0] 16810 v_1 := v.Args[1] 16811 if v_1.Op != OpARM64MOVDconst { 16812 break 16813 } 16814 c := v_1.AuxInt 16815 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 16816 break 16817 } 16818 v.reset(OpARM64SUBshiftLL) 16819 v.AuxInt = log2(c / 5) 16820 v.AddArg(a) 16821 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 16822 v0.AuxInt = 2 16823 v0.AddArg(x) 16824 v0.AddArg(x) 16825 v.AddArg(v0) 16826 return true 16827 } 16828 // match: (MSUBW a (MOVDconst [c]) x) 16829 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 16830 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 16831 for { 16832 x := v.Args[2] 16833 a := v.Args[0] 16834 v_1 := v.Args[1] 16835 if v_1.Op != OpARM64MOVDconst { 
// rewriteValueARM64_OpARM64MSUBW_20 applies the last group of rewrite rules for
// MSUBW (32-bit multiply-subtract: a - x*y). It folds the cases where the
// minuend or both multiplicands are constants, and reports whether v was
// rewritten. Generated from gen/ARM64.rules; do not hand-edit the logic.
func rewriteValueARM64_OpARM64MSUBW_20(v *Value) bool {
	b := v.Block
	// match: (MSUBW (MOVDconst [c]) x y)
	// result: (ADDconst [c] (MNEGW <x.Type> x y))
	for {
		y := v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		// c - x*y  ==>  c + (-(x*y)); MNEGW computes the negated 32-bit product.
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MSUBW a (MOVDconst [c]) (MOVDconst [d]))
	// result: (SUBconst [int64(int32(c)*int32(d))] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
		v.reset(OpARM64SUBconst)
		// Product is computed in 32 bits (int32 truncation) to match MSUBW's
		// 32-bit multiply semantics, then widened for the AuxInt.
		v.AuxInt = int64(int32(c) * int32(d))
		v.AddArg(a)
		return true
	}
	return false
}
* int32(d)) 16915 v.AddArg(a) 16916 return true 16917 } 16918 return false 16919 } 16920 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool { 16921 // match: (MUL (NEG x) y) 16922 // result: (MNEG x y) 16923 for { 16924 y := v.Args[1] 16925 v_0 := v.Args[0] 16926 if v_0.Op != OpARM64NEG { 16927 break 16928 } 16929 x := v_0.Args[0] 16930 v.reset(OpARM64MNEG) 16931 v.AddArg(x) 16932 v.AddArg(y) 16933 return true 16934 } 16935 // match: (MUL y (NEG x)) 16936 // result: (MNEG x y) 16937 for { 16938 _ = v.Args[1] 16939 y := v.Args[0] 16940 v_1 := v.Args[1] 16941 if v_1.Op != OpARM64NEG { 16942 break 16943 } 16944 x := v_1.Args[0] 16945 v.reset(OpARM64MNEG) 16946 v.AddArg(x) 16947 v.AddArg(y) 16948 return true 16949 } 16950 // match: (MUL x (MOVDconst [-1])) 16951 // result: (NEG x) 16952 for { 16953 _ = v.Args[1] 16954 x := v.Args[0] 16955 v_1 := v.Args[1] 16956 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != -1 { 16957 break 16958 } 16959 v.reset(OpARM64NEG) 16960 v.AddArg(x) 16961 return true 16962 } 16963 // match: (MUL (MOVDconst [-1]) x) 16964 // result: (NEG x) 16965 for { 16966 x := v.Args[1] 16967 v_0 := v.Args[0] 16968 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != -1 { 16969 break 16970 } 16971 v.reset(OpARM64NEG) 16972 v.AddArg(x) 16973 return true 16974 } 16975 // match: (MUL _ (MOVDconst [0])) 16976 // result: (MOVDconst [0]) 16977 for { 16978 _ = v.Args[1] 16979 v_1 := v.Args[1] 16980 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 { 16981 break 16982 } 16983 v.reset(OpARM64MOVDconst) 16984 v.AuxInt = 0 16985 return true 16986 } 16987 // match: (MUL (MOVDconst [0]) _) 16988 // result: (MOVDconst [0]) 16989 for { 16990 _ = v.Args[1] 16991 v_0 := v.Args[0] 16992 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 0 { 16993 break 16994 } 16995 v.reset(OpARM64MOVDconst) 16996 v.AuxInt = 0 16997 return true 16998 } 16999 // match: (MUL x (MOVDconst [1])) 17000 // result: x 17001 for { 17002 _ = v.Args[1] 17003 x := v.Args[0] 17004 v_1 := v.Args[1] 17005 if v_1.Op != 
OpARM64MOVDconst || v_1.AuxInt != 1 { 17006 break 17007 } 17008 v.reset(OpCopy) 17009 v.Type = x.Type 17010 v.AddArg(x) 17011 return true 17012 } 17013 // match: (MUL (MOVDconst [1]) x) 17014 // result: x 17015 for { 17016 x := v.Args[1] 17017 v_0 := v.Args[0] 17018 if v_0.Op != OpARM64MOVDconst || v_0.AuxInt != 1 { 17019 break 17020 } 17021 v.reset(OpCopy) 17022 v.Type = x.Type 17023 v.AddArg(x) 17024 return true 17025 } 17026 // match: (MUL x (MOVDconst [c])) 17027 // cond: isPowerOfTwo(c) 17028 // result: (SLLconst [log2(c)] x) 17029 for { 17030 _ = v.Args[1] 17031 x := v.Args[0] 17032 v_1 := v.Args[1] 17033 if v_1.Op != OpARM64MOVDconst { 17034 break 17035 } 17036 c := v_1.AuxInt 17037 if !(isPowerOfTwo(c)) { 17038 break 17039 } 17040 v.reset(OpARM64SLLconst) 17041 v.AuxInt = log2(c) 17042 v.AddArg(x) 17043 return true 17044 } 17045 // match: (MUL (MOVDconst [c]) x) 17046 // cond: isPowerOfTwo(c) 17047 // result: (SLLconst [log2(c)] x) 17048 for { 17049 x := v.Args[1] 17050 v_0 := v.Args[0] 17051 if v_0.Op != OpARM64MOVDconst { 17052 break 17053 } 17054 c := v_0.AuxInt 17055 if !(isPowerOfTwo(c)) { 17056 break 17057 } 17058 v.reset(OpARM64SLLconst) 17059 v.AuxInt = log2(c) 17060 v.AddArg(x) 17061 return true 17062 } 17063 return false 17064 } 17065 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 17066 b := v.Block 17067 // match: (MUL x (MOVDconst [c])) 17068 // cond: isPowerOfTwo(c-1) && c >= 3 17069 // result: (ADDshiftLL x x [log2(c-1)]) 17070 for { 17071 _ = v.Args[1] 17072 x := v.Args[0] 17073 v_1 := v.Args[1] 17074 if v_1.Op != OpARM64MOVDconst { 17075 break 17076 } 17077 c := v_1.AuxInt 17078 if !(isPowerOfTwo(c-1) && c >= 3) { 17079 break 17080 } 17081 v.reset(OpARM64ADDshiftLL) 17082 v.AuxInt = log2(c - 1) 17083 v.AddArg(x) 17084 v.AddArg(x) 17085 return true 17086 } 17087 // match: (MUL (MOVDconst [c]) x) 17088 // cond: isPowerOfTwo(c-1) && c >= 3 17089 // result: (ADDshiftLL x x [log2(c-1)]) 17090 for { 17091 x := v.Args[1] 17092 v_0 := 
v.Args[0] 17093 if v_0.Op != OpARM64MOVDconst { 17094 break 17095 } 17096 c := v_0.AuxInt 17097 if !(isPowerOfTwo(c-1) && c >= 3) { 17098 break 17099 } 17100 v.reset(OpARM64ADDshiftLL) 17101 v.AuxInt = log2(c - 1) 17102 v.AddArg(x) 17103 v.AddArg(x) 17104 return true 17105 } 17106 // match: (MUL x (MOVDconst [c])) 17107 // cond: isPowerOfTwo(c+1) && c >= 7 17108 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 17109 for { 17110 _ = v.Args[1] 17111 x := v.Args[0] 17112 v_1 := v.Args[1] 17113 if v_1.Op != OpARM64MOVDconst { 17114 break 17115 } 17116 c := v_1.AuxInt 17117 if !(isPowerOfTwo(c+1) && c >= 7) { 17118 break 17119 } 17120 v.reset(OpARM64ADDshiftLL) 17121 v.AuxInt = log2(c + 1) 17122 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 17123 v0.AddArg(x) 17124 v.AddArg(v0) 17125 v.AddArg(x) 17126 return true 17127 } 17128 // match: (MUL (MOVDconst [c]) x) 17129 // cond: isPowerOfTwo(c+1) && c >= 7 17130 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 17131 for { 17132 x := v.Args[1] 17133 v_0 := v.Args[0] 17134 if v_0.Op != OpARM64MOVDconst { 17135 break 17136 } 17137 c := v_0.AuxInt 17138 if !(isPowerOfTwo(c+1) && c >= 7) { 17139 break 17140 } 17141 v.reset(OpARM64ADDshiftLL) 17142 v.AuxInt = log2(c + 1) 17143 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 17144 v0.AddArg(x) 17145 v.AddArg(v0) 17146 v.AddArg(x) 17147 return true 17148 } 17149 // match: (MUL x (MOVDconst [c])) 17150 // cond: c%3 == 0 && isPowerOfTwo(c/3) 17151 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 17152 for { 17153 _ = v.Args[1] 17154 x := v.Args[0] 17155 v_1 := v.Args[1] 17156 if v_1.Op != OpARM64MOVDconst { 17157 break 17158 } 17159 c := v_1.AuxInt 17160 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 17161 break 17162 } 17163 v.reset(OpARM64SLLconst) 17164 v.AuxInt = log2(c / 3) 17165 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17166 v0.AuxInt = 1 17167 v0.AddArg(x) 17168 v0.AddArg(x) 17169 v.AddArg(v0) 17170 return true 17171 } 17172 // match: (MUL (MOVDconst 
[c]) x) 17173 // cond: c%3 == 0 && isPowerOfTwo(c/3) 17174 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 17175 for { 17176 x := v.Args[1] 17177 v_0 := v.Args[0] 17178 if v_0.Op != OpARM64MOVDconst { 17179 break 17180 } 17181 c := v_0.AuxInt 17182 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 17183 break 17184 } 17185 v.reset(OpARM64SLLconst) 17186 v.AuxInt = log2(c / 3) 17187 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17188 v0.AuxInt = 1 17189 v0.AddArg(x) 17190 v0.AddArg(x) 17191 v.AddArg(v0) 17192 return true 17193 } 17194 // match: (MUL x (MOVDconst [c])) 17195 // cond: c%5 == 0 && isPowerOfTwo(c/5) 17196 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 17197 for { 17198 _ = v.Args[1] 17199 x := v.Args[0] 17200 v_1 := v.Args[1] 17201 if v_1.Op != OpARM64MOVDconst { 17202 break 17203 } 17204 c := v_1.AuxInt 17205 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 17206 break 17207 } 17208 v.reset(OpARM64SLLconst) 17209 v.AuxInt = log2(c / 5) 17210 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17211 v0.AuxInt = 2 17212 v0.AddArg(x) 17213 v0.AddArg(x) 17214 v.AddArg(v0) 17215 return true 17216 } 17217 // match: (MUL (MOVDconst [c]) x) 17218 // cond: c%5 == 0 && isPowerOfTwo(c/5) 17219 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 17220 for { 17221 x := v.Args[1] 17222 v_0 := v.Args[0] 17223 if v_0.Op != OpARM64MOVDconst { 17224 break 17225 } 17226 c := v_0.AuxInt 17227 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 17228 break 17229 } 17230 v.reset(OpARM64SLLconst) 17231 v.AuxInt = log2(c / 5) 17232 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17233 v0.AuxInt = 2 17234 v0.AddArg(x) 17235 v0.AddArg(x) 17236 v.AddArg(v0) 17237 return true 17238 } 17239 // match: (MUL x (MOVDconst [c])) 17240 // cond: c%7 == 0 && isPowerOfTwo(c/7) 17241 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 17242 for { 17243 _ = v.Args[1] 17244 x := v.Args[0] 17245 v_1 := v.Args[1] 17246 if v_1.Op != OpARM64MOVDconst { 
// rewriteValueARM64_OpARM64MUL_20 applies the last group of rewrite rules for
// 64-bit MUL: strength-reducing multiplication by 9*2^k into shift+add, and
// constant-folding a fully constant product. Reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the logic.
func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
	b := v.Block
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		// x*9 == x + (x<<3); the remaining power-of-two factor becomes a shift.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	// match: (MUL (MOVDconst [d]) (MOVDconst [c]))
	// result: (MOVDconst [c*d])
	// NOTE: commuted duplicate of the case above, emitted mechanically by the
	// rule generator for the commutative MUL op.
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
OpARM64MOVDconst { 17322 break 17323 } 17324 c := v_0.AuxInt 17325 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 17326 break 17327 } 17328 v.reset(OpARM64SLLconst) 17329 v.AuxInt = log2(c / 9) 17330 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17331 v0.AuxInt = 3 17332 v0.AddArg(x) 17333 v0.AddArg(x) 17334 v.AddArg(v0) 17335 return true 17336 } 17337 // match: (MUL (MOVDconst [c]) (MOVDconst [d])) 17338 // result: (MOVDconst [c*d]) 17339 for { 17340 _ = v.Args[1] 17341 v_0 := v.Args[0] 17342 if v_0.Op != OpARM64MOVDconst { 17343 break 17344 } 17345 c := v_0.AuxInt 17346 v_1 := v.Args[1] 17347 if v_1.Op != OpARM64MOVDconst { 17348 break 17349 } 17350 d := v_1.AuxInt 17351 v.reset(OpARM64MOVDconst) 17352 v.AuxInt = c * d 17353 return true 17354 } 17355 // match: (MUL (MOVDconst [d]) (MOVDconst [c])) 17356 // result: (MOVDconst [c*d]) 17357 for { 17358 _ = v.Args[1] 17359 v_0 := v.Args[0] 17360 if v_0.Op != OpARM64MOVDconst { 17361 break 17362 } 17363 d := v_0.AuxInt 17364 v_1 := v.Args[1] 17365 if v_1.Op != OpARM64MOVDconst { 17366 break 17367 } 17368 c := v_1.AuxInt 17369 v.reset(OpARM64MOVDconst) 17370 v.AuxInt = c * d 17371 return true 17372 } 17373 return false 17374 } 17375 func rewriteValueARM64_OpARM64MULW_0(v *Value) bool { 17376 // match: (MULW (NEG x) y) 17377 // result: (MNEGW x y) 17378 for { 17379 y := v.Args[1] 17380 v_0 := v.Args[0] 17381 if v_0.Op != OpARM64NEG { 17382 break 17383 } 17384 x := v_0.Args[0] 17385 v.reset(OpARM64MNEGW) 17386 v.AddArg(x) 17387 v.AddArg(y) 17388 return true 17389 } 17390 // match: (MULW y (NEG x)) 17391 // result: (MNEGW x y) 17392 for { 17393 _ = v.Args[1] 17394 y := v.Args[0] 17395 v_1 := v.Args[1] 17396 if v_1.Op != OpARM64NEG { 17397 break 17398 } 17399 x := v_1.Args[0] 17400 v.reset(OpARM64MNEGW) 17401 v.AddArg(x) 17402 v.AddArg(y) 17403 return true 17404 } 17405 // match: (MULW x (MOVDconst [c])) 17406 // cond: int32(c)==-1 17407 // result: (NEG x) 17408 for { 17409 _ = v.Args[1] 17410 x := v.Args[0] 17411 v_1 := 
v.Args[1] 17412 if v_1.Op != OpARM64MOVDconst { 17413 break 17414 } 17415 c := v_1.AuxInt 17416 if !(int32(c) == -1) { 17417 break 17418 } 17419 v.reset(OpARM64NEG) 17420 v.AddArg(x) 17421 return true 17422 } 17423 // match: (MULW (MOVDconst [c]) x) 17424 // cond: int32(c)==-1 17425 // result: (NEG x) 17426 for { 17427 x := v.Args[1] 17428 v_0 := v.Args[0] 17429 if v_0.Op != OpARM64MOVDconst { 17430 break 17431 } 17432 c := v_0.AuxInt 17433 if !(int32(c) == -1) { 17434 break 17435 } 17436 v.reset(OpARM64NEG) 17437 v.AddArg(x) 17438 return true 17439 } 17440 // match: (MULW _ (MOVDconst [c])) 17441 // cond: int32(c)==0 17442 // result: (MOVDconst [0]) 17443 for { 17444 _ = v.Args[1] 17445 v_1 := v.Args[1] 17446 if v_1.Op != OpARM64MOVDconst { 17447 break 17448 } 17449 c := v_1.AuxInt 17450 if !(int32(c) == 0) { 17451 break 17452 } 17453 v.reset(OpARM64MOVDconst) 17454 v.AuxInt = 0 17455 return true 17456 } 17457 // match: (MULW (MOVDconst [c]) _) 17458 // cond: int32(c)==0 17459 // result: (MOVDconst [0]) 17460 for { 17461 _ = v.Args[1] 17462 v_0 := v.Args[0] 17463 if v_0.Op != OpARM64MOVDconst { 17464 break 17465 } 17466 c := v_0.AuxInt 17467 if !(int32(c) == 0) { 17468 break 17469 } 17470 v.reset(OpARM64MOVDconst) 17471 v.AuxInt = 0 17472 return true 17473 } 17474 // match: (MULW x (MOVDconst [c])) 17475 // cond: int32(c)==1 17476 // result: x 17477 for { 17478 _ = v.Args[1] 17479 x := v.Args[0] 17480 v_1 := v.Args[1] 17481 if v_1.Op != OpARM64MOVDconst { 17482 break 17483 } 17484 c := v_1.AuxInt 17485 if !(int32(c) == 1) { 17486 break 17487 } 17488 v.reset(OpCopy) 17489 v.Type = x.Type 17490 v.AddArg(x) 17491 return true 17492 } 17493 // match: (MULW (MOVDconst [c]) x) 17494 // cond: int32(c)==1 17495 // result: x 17496 for { 17497 x := v.Args[1] 17498 v_0 := v.Args[0] 17499 if v_0.Op != OpARM64MOVDconst { 17500 break 17501 } 17502 c := v_0.AuxInt 17503 if !(int32(c) == 1) { 17504 break 17505 } 17506 v.reset(OpCopy) 17507 v.Type = x.Type 17508 v.AddArg(x) 17509 
return true 17510 } 17511 // match: (MULW x (MOVDconst [c])) 17512 // cond: isPowerOfTwo(c) 17513 // result: (SLLconst [log2(c)] x) 17514 for { 17515 _ = v.Args[1] 17516 x := v.Args[0] 17517 v_1 := v.Args[1] 17518 if v_1.Op != OpARM64MOVDconst { 17519 break 17520 } 17521 c := v_1.AuxInt 17522 if !(isPowerOfTwo(c)) { 17523 break 17524 } 17525 v.reset(OpARM64SLLconst) 17526 v.AuxInt = log2(c) 17527 v.AddArg(x) 17528 return true 17529 } 17530 // match: (MULW (MOVDconst [c]) x) 17531 // cond: isPowerOfTwo(c) 17532 // result: (SLLconst [log2(c)] x) 17533 for { 17534 x := v.Args[1] 17535 v_0 := v.Args[0] 17536 if v_0.Op != OpARM64MOVDconst { 17537 break 17538 } 17539 c := v_0.AuxInt 17540 if !(isPowerOfTwo(c)) { 17541 break 17542 } 17543 v.reset(OpARM64SLLconst) 17544 v.AuxInt = log2(c) 17545 v.AddArg(x) 17546 return true 17547 } 17548 return false 17549 } 17550 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool { 17551 b := v.Block 17552 // match: (MULW x (MOVDconst [c])) 17553 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 17554 // result: (ADDshiftLL x x [log2(c-1)]) 17555 for { 17556 _ = v.Args[1] 17557 x := v.Args[0] 17558 v_1 := v.Args[1] 17559 if v_1.Op != OpARM64MOVDconst { 17560 break 17561 } 17562 c := v_1.AuxInt 17563 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 17564 break 17565 } 17566 v.reset(OpARM64ADDshiftLL) 17567 v.AuxInt = log2(c - 1) 17568 v.AddArg(x) 17569 v.AddArg(x) 17570 return true 17571 } 17572 // match: (MULW (MOVDconst [c]) x) 17573 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 17574 // result: (ADDshiftLL x x [log2(c-1)]) 17575 for { 17576 x := v.Args[1] 17577 v_0 := v.Args[0] 17578 if v_0.Op != OpARM64MOVDconst { 17579 break 17580 } 17581 c := v_0.AuxInt 17582 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 17583 break 17584 } 17585 v.reset(OpARM64ADDshiftLL) 17586 v.AuxInt = log2(c - 1) 17587 v.AddArg(x) 17588 v.AddArg(x) 17589 return true 17590 } 17591 // match: (MULW x (MOVDconst [c])) 17592 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 17593 // 
result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 17594 for { 17595 _ = v.Args[1] 17596 x := v.Args[0] 17597 v_1 := v.Args[1] 17598 if v_1.Op != OpARM64MOVDconst { 17599 break 17600 } 17601 c := v_1.AuxInt 17602 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 17603 break 17604 } 17605 v.reset(OpARM64ADDshiftLL) 17606 v.AuxInt = log2(c + 1) 17607 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 17608 v0.AddArg(x) 17609 v.AddArg(v0) 17610 v.AddArg(x) 17611 return true 17612 } 17613 // match: (MULW (MOVDconst [c]) x) 17614 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 17615 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 17616 for { 17617 x := v.Args[1] 17618 v_0 := v.Args[0] 17619 if v_0.Op != OpARM64MOVDconst { 17620 break 17621 } 17622 c := v_0.AuxInt 17623 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 17624 break 17625 } 17626 v.reset(OpARM64ADDshiftLL) 17627 v.AuxInt = log2(c + 1) 17628 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 17629 v0.AddArg(x) 17630 v.AddArg(v0) 17631 v.AddArg(x) 17632 return true 17633 } 17634 // match: (MULW x (MOVDconst [c])) 17635 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 17636 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 17637 for { 17638 _ = v.Args[1] 17639 x := v.Args[0] 17640 v_1 := v.Args[1] 17641 if v_1.Op != OpARM64MOVDconst { 17642 break 17643 } 17644 c := v_1.AuxInt 17645 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 17646 break 17647 } 17648 v.reset(OpARM64SLLconst) 17649 v.AuxInt = log2(c / 3) 17650 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17651 v0.AuxInt = 1 17652 v0.AddArg(x) 17653 v0.AddArg(x) 17654 v.AddArg(v0) 17655 return true 17656 } 17657 // match: (MULW (MOVDconst [c]) x) 17658 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 17659 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 17660 for { 17661 x := v.Args[1] 17662 v_0 := v.Args[0] 17663 if v_0.Op != OpARM64MOVDconst { 17664 break 17665 } 17666 c := v_0.AuxInt 17667 if !(c%3 == 0 && isPowerOfTwo(c/3) && 
is32Bit(c)) { 17668 break 17669 } 17670 v.reset(OpARM64SLLconst) 17671 v.AuxInt = log2(c / 3) 17672 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17673 v0.AuxInt = 1 17674 v0.AddArg(x) 17675 v0.AddArg(x) 17676 v.AddArg(v0) 17677 return true 17678 } 17679 // match: (MULW x (MOVDconst [c])) 17680 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 17681 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 17682 for { 17683 _ = v.Args[1] 17684 x := v.Args[0] 17685 v_1 := v.Args[1] 17686 if v_1.Op != OpARM64MOVDconst { 17687 break 17688 } 17689 c := v_1.AuxInt 17690 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 17691 break 17692 } 17693 v.reset(OpARM64SLLconst) 17694 v.AuxInt = log2(c / 5) 17695 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17696 v0.AuxInt = 2 17697 v0.AddArg(x) 17698 v0.AddArg(x) 17699 v.AddArg(v0) 17700 return true 17701 } 17702 // match: (MULW (MOVDconst [c]) x) 17703 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 17704 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 17705 for { 17706 x := v.Args[1] 17707 v_0 := v.Args[0] 17708 if v_0.Op != OpARM64MOVDconst { 17709 break 17710 } 17711 c := v_0.AuxInt 17712 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 17713 break 17714 } 17715 v.reset(OpARM64SLLconst) 17716 v.AuxInt = log2(c / 5) 17717 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 17718 v0.AuxInt = 2 17719 v0.AddArg(x) 17720 v0.AddArg(x) 17721 v.AddArg(v0) 17722 return true 17723 } 17724 // match: (MULW x (MOVDconst [c])) 17725 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 17726 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 17727 for { 17728 _ = v.Args[1] 17729 x := v.Args[0] 17730 v_1 := v.Args[1] 17731 if v_1.Op != OpARM64MOVDconst { 17732 break 17733 } 17734 c := v_1.AuxInt 17735 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 17736 break 17737 } 17738 v.reset(OpARM64SLLconst) 17739 v.AuxInt = log2(c / 7) 17740 v0 := b.NewValue0(v.Pos, 
// rewriteValueARM64_OpARM64MULW_20 applies the last group of rewrite rules for
// MULW (32-bit multiply): strength-reducing multiplication by 9*2^k into
// shift+add, and constant-folding a fully constant product with 32-bit
// truncation. Reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the logic.
func rewriteValueARM64_OpARM64MULW_20(v *Value) bool {
	b := v.Block
	// match: (MULW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		// x*9 == x + (x<<3); the remaining power-of-two factor becomes a shift.
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MULW (MOVDconst [c]) (MOVDconst [d]))
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		// 32-bit product (int32 truncation) to match MULW semantics.
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	// match: (MULW (MOVDconst [d]) (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c)*int32(d))])
	// NOTE: commuted duplicate of the case above, emitted mechanically by the
	// rule generator for the commutative MULW op.
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVN_0 applies the rewrite rules for MVN (bitwise
// NOT): constant folding, and absorbing a constant shift of the operand into
// the combined MVNshift* ops. Reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the logic.
func rewriteValueARM64_OpARM64MVN_0(v *Value) bool {
	// match: (MVN (MOVDconst [c]))
	// result: (MOVDconst [^c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^c
		return true
	}
	// match: (MVN x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftLL [c] y)
	// The shift is folded into MVN only when clobberIfDead allows it —
	// presumably when the shift result x has no other uses; see the helper's
	// definition elsewhere in this package.
	for {
		x := v.Args[0]
		if x.Op != OpARM64SLLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftLL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRA [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRAconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRA)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MVNshiftLL_0 constant-folds MVNshiftLL (NOT of a
// left-shifted value) when the shifted operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64MVNshiftLL_0(v *Value) bool {
	// match: (MVNshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// Shift performed on uint64 so the logical left shift is well-defined,
		// then complemented.
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftRA_0 constant-folds MVNshiftRA (NOT of an
// arithmetically right-shifted value) when the operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64MVNshiftRA_0(v *Value) bool {
	// match: (MVNshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// c is a signed int64, so >> here is an arithmetic (sign-extending) shift.
		v.AuxInt = ^(c >> uint64(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MVNshiftRL_0 constant-folds MVNshiftRL (NOT of a
// logically right-shifted value) when the operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64MVNshiftRL_0(v *Value) bool {
	// match: (MVNshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// Shift performed on uint64 to get a logical (zero-filling) right shift.
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEG_0 applies the rewrite rules for NEG (arithmetic
// negation): fusing NEG of a multiply into MNEG/MNEGW, constant folding, and
// absorbing a constant shift of the operand into the combined NEGshift* ops.
// Reports whether v was rewritten.
// Generated from gen/ARM64.rules; do not hand-edit the logic.
func rewriteValueARM64_OpARM64NEG_0(v *Value) bool {
	// match: (NEG (MUL x y))
	// result: (MNEG x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MUL {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEG)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (NEG (MULW x y))
	// result: (MNEGW x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MULW {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64MNEGW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (NEG (MOVDconst [c]))
	// result: (MOVDconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c
		return true
	}
	// match: (NEG x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftLL [c] y)
	// The shift is folded into NEG only when clobberIfDead allows it —
	// presumably when the shift result x has no other uses; see the helper's
	// definition elsewhere in this package.
	for {
		x := v.Args[0]
		if x.Op != OpARM64SLLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftLL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRA [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRAconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRA)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64NEGshiftLL_0 constant-folds NEGshiftLL (negation of
// a left-shifted value) when the shifted operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64NEGshiftLL_0(v *Value) bool {
	// match: (NEGshiftLL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// Shift performed on uint64 so the logical left shift is well-defined,
		// then negated.
		v.AuxInt = -int64(uint64(c) << uint64(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEGshiftRA_0 constant-folds NEGshiftRA (negation of
// an arithmetically right-shifted value) when the operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64NEGshiftRA_0(v *Value) bool {
	// match: (NEGshiftRA (MOVDconst [c]) [d])
	// result: (MOVDconst [-(c>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// c is a signed int64, so >> here is an arithmetic (sign-extending) shift.
		v.AuxInt = -(c >> uint64(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64NEGshiftRL_0 constant-folds NEGshiftRL (negation of
// a logically right-shifted value) when the operand is a constant.
// Reports whether v was rewritten.
func rewriteValueARM64_OpARM64NEGshiftRL_0(v *Value) bool {
	// match: (NEGshiftRL (MOVDconst [c]) [d])
	// result: (MOVDconst [-int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		// Shift performed on uint64 to get a logical (zero-filling) right shift.
		v.AuxInt = -int64(uint64(c) >> uint64(d))
		return true
	}
	return false
}
18148 v_0 := v.Args[0] 18149 if v_0.Op != OpARM64FlagLT_UGT { 18150 break 18151 } 18152 v.reset(OpARM64MOVDconst) 18153 v.AuxInt = 1 18154 return true 18155 } 18156 // match: (NotEqual (FlagGT_ULT)) 18157 // result: (MOVDconst [1]) 18158 for { 18159 v_0 := v.Args[0] 18160 if v_0.Op != OpARM64FlagGT_ULT { 18161 break 18162 } 18163 v.reset(OpARM64MOVDconst) 18164 v.AuxInt = 1 18165 return true 18166 } 18167 // match: (NotEqual (FlagGT_UGT)) 18168 // result: (MOVDconst [1]) 18169 for { 18170 v_0 := v.Args[0] 18171 if v_0.Op != OpARM64FlagGT_UGT { 18172 break 18173 } 18174 v.reset(OpARM64MOVDconst) 18175 v.AuxInt = 1 18176 return true 18177 } 18178 // match: (NotEqual (InvertFlags x)) 18179 // result: (NotEqual x) 18180 for { 18181 v_0 := v.Args[0] 18182 if v_0.Op != OpARM64InvertFlags { 18183 break 18184 } 18185 x := v_0.Args[0] 18186 v.reset(OpARM64NotEqual) 18187 v.AddArg(x) 18188 return true 18189 } 18190 return false 18191 } 18192 func rewriteValueARM64_OpARM64OR_0(v *Value) bool { 18193 // match: (OR x (MOVDconst [c])) 18194 // result: (ORconst [c] x) 18195 for { 18196 _ = v.Args[1] 18197 x := v.Args[0] 18198 v_1 := v.Args[1] 18199 if v_1.Op != OpARM64MOVDconst { 18200 break 18201 } 18202 c := v_1.AuxInt 18203 v.reset(OpARM64ORconst) 18204 v.AuxInt = c 18205 v.AddArg(x) 18206 return true 18207 } 18208 // match: (OR (MOVDconst [c]) x) 18209 // result: (ORconst [c] x) 18210 for { 18211 x := v.Args[1] 18212 v_0 := v.Args[0] 18213 if v_0.Op != OpARM64MOVDconst { 18214 break 18215 } 18216 c := v_0.AuxInt 18217 v.reset(OpARM64ORconst) 18218 v.AuxInt = c 18219 v.AddArg(x) 18220 return true 18221 } 18222 // match: (OR x x) 18223 // result: x 18224 for { 18225 x := v.Args[1] 18226 if x != v.Args[0] { 18227 break 18228 } 18229 v.reset(OpCopy) 18230 v.Type = x.Type 18231 v.AddArg(x) 18232 return true 18233 } 18234 // match: (OR x (MVN y)) 18235 // result: (ORN x y) 18236 for { 18237 _ = v.Args[1] 18238 x := v.Args[0] 18239 v_1 := v.Args[1] 18240 if v_1.Op != OpARM64MVN { 
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64ORN)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (MVN y) x)
	// result: (ORN x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MVN {
			break
		}
		y := v_0.Args[0]
		v.reset(OpARM64ORN)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (OR x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ORshiftLL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (OR x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (OR x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ORshiftRL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (OR x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64OR_10 continues the OR rules: the remaining
// ORshiftRA form, then recognition of the expanded variable-rotate idiom
// (SLL/SRL paired with a CSEL0-guarded complementary shift) as a single
// ROR (64-bit) or RORW (32-bit) instruction, in both operand orders.
// NOTE: generated from gen/ARM64.rules (see file header); do not hand-edit.
func rewriteValueARM64_OpARM64OR_10(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OR x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ORshiftRA x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (OR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 63 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (OR (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLL {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (OR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 63 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SLL {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SLL {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt64 {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 31 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 {
			break
		}
		_ = v_1_0.Args[1]
		v_1_0_0 := v_1_0.Args[0]
		if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (OR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x (NEG <t> y))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt32 {
			break
		}
		_ = v_0_0.Args[1]
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLL {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (OR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 31 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SLL {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (OR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (RORW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SLL {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVWUreg || x != v_1_0.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) {
18942 break 18943 } 18944 v.reset(OpARM64RORW) 18945 v.AddArg(x) 18946 v.AddArg(y) 18947 return true 18948 } 18949 // match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y)) 18950 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 18951 // result: (BFI [bfc] y x) 18952 for { 18953 _ = v.Args[1] 18954 v_0 := v.Args[0] 18955 if v_0.Op != OpARM64UBFIZ { 18956 break 18957 } 18958 bfc := v_0.AuxInt 18959 x := v_0.Args[0] 18960 v_1 := v.Args[1] 18961 if v_1.Op != OpARM64ANDconst { 18962 break 18963 } 18964 ac := v_1.AuxInt 18965 y := v_1.Args[0] 18966 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 18967 break 18968 } 18969 v.reset(OpARM64BFI) 18970 v.AuxInt = bfc 18971 v.AddArg(y) 18972 v.AddArg(x) 18973 return true 18974 } 18975 return false 18976 } 18977 func rewriteValueARM64_OpARM64OR_20(v *Value) bool { 18978 b := v.Block 18979 // match: (OR (ANDconst [ac] y) (UBFIZ [bfc] x)) 18980 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 18981 // result: (BFI [bfc] y x) 18982 for { 18983 _ = v.Args[1] 18984 v_0 := v.Args[0] 18985 if v_0.Op != OpARM64ANDconst { 18986 break 18987 } 18988 ac := v_0.AuxInt 18989 y := v_0.Args[0] 18990 v_1 := v.Args[1] 18991 if v_1.Op != OpARM64UBFIZ { 18992 break 18993 } 18994 bfc := v_1.AuxInt 18995 x := v_1.Args[0] 18996 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 18997 break 18998 } 18999 v.reset(OpARM64BFI) 19000 v.AuxInt = bfc 19001 v.AddArg(y) 19002 v.AddArg(x) 19003 return true 19004 } 19005 // match: (OR (UBFX [bfc] x) (ANDconst [ac] y)) 19006 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 19007 // result: (BFXIL [bfc] y x) 19008 for { 19009 _ = v.Args[1] 19010 v_0 := v.Args[0] 19011 if v_0.Op != OpARM64UBFX { 19012 break 19013 } 19014 bfc := v_0.AuxInt 19015 x := v_0.Args[0] 19016 v_1 := v.Args[1] 19017 if v_1.Op != OpARM64ANDconst { 19018 break 19019 } 19020 ac := v_1.AuxInt 19021 y := v_1.Args[0] 19022 if !(ac 
== ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 19023 break 19024 } 19025 v.reset(OpARM64BFXIL) 19026 v.AuxInt = bfc 19027 v.AddArg(y) 19028 v.AddArg(x) 19029 return true 19030 } 19031 // match: (OR (ANDconst [ac] y) (UBFX [bfc] x)) 19032 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 19033 // result: (BFXIL [bfc] y x) 19034 for { 19035 _ = v.Args[1] 19036 v_0 := v.Args[0] 19037 if v_0.Op != OpARM64ANDconst { 19038 break 19039 } 19040 ac := v_0.AuxInt 19041 y := v_0.Args[0] 19042 v_1 := v.Args[1] 19043 if v_1.Op != OpARM64UBFX { 19044 break 19045 } 19046 bfc := v_1.AuxInt 19047 x := v_1.Args[0] 19048 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 19049 break 19050 } 19051 v.reset(OpARM64BFXIL) 19052 v.AuxInt = bfc 19053 v.AddArg(y) 19054 v.AddArg(x) 19055 return true 19056 } 19057 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 19058 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 19059 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 19060 for { 19061 t := v.Type 19062 _ = v.Args[1] 19063 o0 := v.Args[0] 19064 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19065 break 19066 } 19067 _ = o0.Args[1] 19068 o1 := o0.Args[0] 19069 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19070 break 19071 } 19072 _ = o1.Args[1] 19073 s0 := o1.Args[0] 19074 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19075 break 19076 } 19077 y0 := s0.Args[0] 19078 if y0.Op != 
OpARM64MOVDnop { 19079 break 19080 } 19081 x0 := y0.Args[0] 19082 if x0.Op != OpARM64MOVBUload { 19083 break 19084 } 19085 i3 := x0.AuxInt 19086 s := x0.Aux 19087 mem := x0.Args[1] 19088 p := x0.Args[0] 19089 y1 := o1.Args[1] 19090 if y1.Op != OpARM64MOVDnop { 19091 break 19092 } 19093 x1 := y1.Args[0] 19094 if x1.Op != OpARM64MOVBUload { 19095 break 19096 } 19097 i2 := x1.AuxInt 19098 if x1.Aux != s { 19099 break 19100 } 19101 _ = x1.Args[1] 19102 if p != x1.Args[0] || mem != x1.Args[1] { 19103 break 19104 } 19105 y2 := o0.Args[1] 19106 if y2.Op != OpARM64MOVDnop { 19107 break 19108 } 19109 x2 := y2.Args[0] 19110 if x2.Op != OpARM64MOVBUload { 19111 break 19112 } 19113 i1 := x2.AuxInt 19114 if x2.Aux != s { 19115 break 19116 } 19117 _ = x2.Args[1] 19118 if p != x2.Args[0] || mem != x2.Args[1] { 19119 break 19120 } 19121 y3 := v.Args[1] 19122 if y3.Op != OpARM64MOVDnop { 19123 break 19124 } 19125 x3 := y3.Args[0] 19126 if x3.Op != OpARM64MOVBUload { 19127 break 19128 } 19129 i0 := x3.AuxInt 19130 if x3.Aux != s { 19131 break 19132 } 19133 _ = x3.Args[1] 19134 if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19135 break 19136 } 19137 b = mergePoint(b, x0, x1, x2, x3) 19138 v0 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t) 19139 v.reset(OpCopy) 19140 v.AddArg(v0) 19141 v0.Aux = s 19142 v1 := b.NewValue0(x3.Pos, OpOffPtr, p.Type) 19143 v1.AuxInt = i0 19144 v1.AddArg(p) 19145 v0.AddArg(v1) 19146 v0.AddArg(mem) 19147 return true 19148 } 19149 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] 
y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 19150 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 19151 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 19152 for { 19153 t := v.Type 19154 _ = v.Args[1] 19155 y3 := v.Args[0] 19156 if y3.Op != OpARM64MOVDnop { 19157 break 19158 } 19159 x3 := y3.Args[0] 19160 if x3.Op != OpARM64MOVBUload { 19161 break 19162 } 19163 i0 := x3.AuxInt 19164 s := x3.Aux 19165 mem := x3.Args[1] 19166 p := x3.Args[0] 19167 o0 := v.Args[1] 19168 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19169 break 19170 } 19171 _ = o0.Args[1] 19172 o1 := o0.Args[0] 19173 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19174 break 19175 } 19176 _ = o1.Args[1] 19177 s0 := o1.Args[0] 19178 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19179 break 19180 } 19181 y0 := s0.Args[0] 19182 if y0.Op != OpARM64MOVDnop { 19183 break 19184 } 19185 x0 := y0.Args[0] 19186 if x0.Op != OpARM64MOVBUload { 19187 break 19188 } 19189 i3 := x0.AuxInt 19190 if x0.Aux != s { 19191 break 19192 } 19193 _ = x0.Args[1] 19194 if p != x0.Args[0] || mem != x0.Args[1] { 19195 break 19196 } 19197 y1 := o1.Args[1] 19198 if y1.Op != OpARM64MOVDnop { 19199 break 19200 } 19201 x1 := y1.Args[0] 19202 if x1.Op != OpARM64MOVBUload { 19203 break 19204 } 19205 i2 := x1.AuxInt 19206 if x1.Aux != s { 19207 break 19208 } 19209 _ = x1.Args[1] 19210 if p != x1.Args[0] || mem != x1.Args[1] { 19211 break 19212 } 19213 y2 := o0.Args[1] 19214 if y2.Op != OpARM64MOVDnop { 19215 break 19216 } 19217 x2 := 
y2.Args[0] 19218 if x2.Op != OpARM64MOVBUload { 19219 break 19220 } 19221 i1 := x2.AuxInt 19222 if x2.Aux != s { 19223 break 19224 } 19225 _ = x2.Args[1] 19226 if p != x2.Args[0] || mem != x2.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19227 break 19228 } 19229 b = mergePoint(b, x0, x1, x2, x3) 19230 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 19231 v.reset(OpCopy) 19232 v.AddArg(v0) 19233 v0.Aux = s 19234 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 19235 v1.AuxInt = i0 19236 v1.AddArg(p) 19237 v0.AddArg(v1) 19238 v0.AddArg(mem) 19239 return true 19240 } 19241 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem))) 19242 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 19243 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 19244 for { 19245 t := v.Type 19246 _ = v.Args[1] 19247 o0 := v.Args[0] 19248 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19249 break 19250 } 19251 _ = o0.Args[1] 
19252 o1 := o0.Args[0] 19253 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19254 break 19255 } 19256 _ = o1.Args[1] 19257 s0 := o1.Args[0] 19258 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19259 break 19260 } 19261 y0 := s0.Args[0] 19262 if y0.Op != OpARM64MOVDnop { 19263 break 19264 } 19265 x0 := y0.Args[0] 19266 if x0.Op != OpARM64MOVBUload || x0.AuxInt != 3 { 19267 break 19268 } 19269 s := x0.Aux 19270 mem := x0.Args[1] 19271 p := x0.Args[0] 19272 y1 := o1.Args[1] 19273 if y1.Op != OpARM64MOVDnop { 19274 break 19275 } 19276 x1 := y1.Args[0] 19277 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 2 || x1.Aux != s { 19278 break 19279 } 19280 _ = x1.Args[1] 19281 if p != x1.Args[0] || mem != x1.Args[1] { 19282 break 19283 } 19284 y2 := o0.Args[1] 19285 if y2.Op != OpARM64MOVDnop { 19286 break 19287 } 19288 x2 := y2.Args[0] 19289 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 1 || x2.Aux != s { 19290 break 19291 } 19292 _ = x2.Args[1] 19293 p1 := x2.Args[0] 19294 if p1.Op != OpARM64ADD { 19295 break 19296 } 19297 idx1 := p1.Args[1] 19298 ptr1 := p1.Args[0] 19299 if mem != x2.Args[1] { 19300 break 19301 } 19302 y3 := v.Args[1] 19303 if y3.Op != OpARM64MOVDnop { 19304 break 19305 } 19306 x3 := y3.Args[0] 19307 if x3.Op != OpARM64MOVBUloadidx { 19308 break 19309 } 19310 _ = x3.Args[2] 19311 ptr0 := x3.Args[0] 19312 idx0 := x3.Args[1] 19313 if mem != x3.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19314 break 19315 } 19316 b = mergePoint(b, x0, x1, x2, x3) 19317 v0 := b.NewValue0(x2.Pos, 
OpARM64MOVWUloadidx, t) 19318 v.reset(OpCopy) 19319 v.AddArg(v0) 19320 v0.AddArg(ptr0) 19321 v0.AddArg(idx0) 19322 v0.AddArg(mem) 19323 return true 19324 } 19325 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 19326 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 19327 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 19328 for { 19329 t := v.Type 19330 _ = v.Args[1] 19331 y3 := v.Args[0] 19332 if y3.Op != OpARM64MOVDnop { 19333 break 19334 } 19335 x3 := y3.Args[0] 19336 if x3.Op != OpARM64MOVBUloadidx { 19337 break 19338 } 19339 mem := x3.Args[2] 19340 ptr0 := x3.Args[0] 19341 idx0 := x3.Args[1] 19342 o0 := v.Args[1] 19343 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19344 break 19345 } 19346 _ = o0.Args[1] 19347 o1 := o0.Args[0] 19348 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19349 break 19350 } 19351 _ = o1.Args[1] 19352 s0 := o1.Args[0] 19353 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19354 break 19355 } 19356 y0 := s0.Args[0] 19357 if y0.Op != OpARM64MOVDnop { 19358 break 19359 } 19360 x0 := y0.Args[0] 19361 if x0.Op != OpARM64MOVBUload || x0.AuxInt != 3 { 19362 break 19363 } 19364 s := x0.Aux 19365 _ = x0.Args[1] 19366 p := x0.Args[0] 19367 if mem != x0.Args[1] { 19368 break 19369 } 19370 y1 := o1.Args[1] 19371 if y1.Op != OpARM64MOVDnop { 19372 
break 19373 } 19374 x1 := y1.Args[0] 19375 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 2 || x1.Aux != s { 19376 break 19377 } 19378 _ = x1.Args[1] 19379 if p != x1.Args[0] || mem != x1.Args[1] { 19380 break 19381 } 19382 y2 := o0.Args[1] 19383 if y2.Op != OpARM64MOVDnop { 19384 break 19385 } 19386 x2 := y2.Args[0] 19387 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 1 || x2.Aux != s { 19388 break 19389 } 19390 _ = x2.Args[1] 19391 p1 := x2.Args[0] 19392 if p1.Op != OpARM64ADD { 19393 break 19394 } 19395 idx1 := p1.Args[1] 19396 ptr1 := p1.Args[0] 19397 if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19398 break 19399 } 19400 b = mergePoint(b, x0, x1, x2, x3) 19401 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 19402 v.reset(OpCopy) 19403 v.AddArg(v0) 19404 v0.AddArg(ptr0) 19405 v0.AddArg(idx0) 19406 v0.AddArg(mem) 19407 return true 19408 } 19409 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem))) 19410 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && 
clobber(o0) && clobber(o1) && clobber(s0) 19411 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 19412 for { 19413 t := v.Type 19414 _ = v.Args[1] 19415 o0 := v.Args[0] 19416 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19417 break 19418 } 19419 _ = o0.Args[1] 19420 o1 := o0.Args[0] 19421 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19422 break 19423 } 19424 _ = o1.Args[1] 19425 s0 := o1.Args[0] 19426 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19427 break 19428 } 19429 y0 := s0.Args[0] 19430 if y0.Op != OpARM64MOVDnop { 19431 break 19432 } 19433 x0 := y0.Args[0] 19434 if x0.Op != OpARM64MOVBUloadidx { 19435 break 19436 } 19437 mem := x0.Args[2] 19438 ptr := x0.Args[0] 19439 x0_1 := x0.Args[1] 19440 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 3 { 19441 break 19442 } 19443 idx := x0_1.Args[0] 19444 y1 := o1.Args[1] 19445 if y1.Op != OpARM64MOVDnop { 19446 break 19447 } 19448 x1 := y1.Args[0] 19449 if x1.Op != OpARM64MOVBUloadidx { 19450 break 19451 } 19452 _ = x1.Args[2] 19453 if ptr != x1.Args[0] { 19454 break 19455 } 19456 x1_1 := x1.Args[1] 19457 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] { 19458 break 19459 } 19460 y2 := o0.Args[1] 19461 if y2.Op != OpARM64MOVDnop { 19462 break 19463 } 19464 x2 := y2.Args[0] 19465 if x2.Op != OpARM64MOVBUloadidx { 19466 break 19467 } 19468 _ = x2.Args[2] 19469 if ptr != x2.Args[0] { 19470 break 19471 } 19472 x2_1 := x2.Args[1] 19473 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 1 || idx != x2_1.Args[0] || mem != x2.Args[2] { 19474 break 19475 } 19476 y3 := v.Args[1] 19477 if y3.Op != OpARM64MOVDnop { 19478 break 19479 } 19480 x3 := y3.Args[0] 19481 if x3.Op != OpARM64MOVBUloadidx { 19482 break 19483 } 19484 _ = x3.Args[2] 19485 if ptr != x3.Args[0] || idx != x3.Args[1] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && 
o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19486 break 19487 } 19488 b = mergePoint(b, x0, x1, x2, x3) 19489 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 19490 v.reset(OpCopy) 19491 v.AddArg(v0) 19492 v0.AddArg(ptr) 19493 v0.AddArg(idx) 19494 v0.AddArg(mem) 19495 return true 19496 } 19497 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 19498 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 19499 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 19500 for { 19501 t := v.Type 19502 _ = v.Args[1] 19503 y3 := v.Args[0] 19504 if y3.Op != OpARM64MOVDnop { 19505 break 19506 } 19507 x3 := y3.Args[0] 19508 if x3.Op != OpARM64MOVBUloadidx { 19509 break 19510 } 19511 mem := x3.Args[2] 19512 ptr := x3.Args[0] 19513 idx := x3.Args[1] 19514 o0 := v.Args[1] 19515 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19516 break 19517 } 19518 _ = o0.Args[1] 19519 o1 := o0.Args[0] 19520 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19521 break 19522 } 19523 _ = o1.Args[1] 19524 s0 := o1.Args[0] 19525 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 19526 break 19527 } 19528 y0 := s0.Args[0] 19529 if y0.Op != OpARM64MOVDnop { 19530 break 19531 } 19532 x0 := y0.Args[0] 19533 if x0.Op != 
OpARM64MOVBUloadidx { 19534 break 19535 } 19536 _ = x0.Args[2] 19537 if ptr != x0.Args[0] { 19538 break 19539 } 19540 x0_1 := x0.Args[1] 19541 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 3 || idx != x0_1.Args[0] || mem != x0.Args[2] { 19542 break 19543 } 19544 y1 := o1.Args[1] 19545 if y1.Op != OpARM64MOVDnop { 19546 break 19547 } 19548 x1 := y1.Args[0] 19549 if x1.Op != OpARM64MOVBUloadidx { 19550 break 19551 } 19552 _ = x1.Args[2] 19553 if ptr != x1.Args[0] { 19554 break 19555 } 19556 x1_1 := x1.Args[1] 19557 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] { 19558 break 19559 } 19560 y2 := o0.Args[1] 19561 if y2.Op != OpARM64MOVDnop { 19562 break 19563 } 19564 x2 := y2.Args[0] 19565 if x2.Op != OpARM64MOVBUloadidx { 19566 break 19567 } 19568 _ = x2.Args[2] 19569 if ptr != x2.Args[0] { 19570 break 19571 } 19572 x2_1 := x2.Args[1] 19573 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 1 || idx != x2_1.Args[0] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 19574 break 19575 } 19576 b = mergePoint(b, x0, x1, x2, x3) 19577 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 19578 v.reset(OpCopy) 19579 v.AddArg(v0) 19580 v0.AddArg(ptr) 19581 v0.AddArg(idx) 19582 v0.AddArg(mem) 19583 return true 19584 } 19585 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) 
y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 19586 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 19587 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 19588 for { 19589 t := v.Type 19590 _ = v.Args[1] 19591 o0 := v.Args[0] 19592 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19593 break 19594 } 19595 _ = o0.Args[1] 19596 o1 := o0.Args[0] 19597 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19598 break 19599 } 19600 _ = o1.Args[1] 19601 o2 := o1.Args[0] 19602 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 19603 break 19604 } 19605 _ = o2.Args[1] 19606 o3 := o2.Args[0] 19607 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 19608 break 19609 } 19610 _ = o3.Args[1] 19611 o4 := o3.Args[0] 19612 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 19613 break 19614 } 19615 _ = o4.Args[1] 19616 o5 := o4.Args[0] 19617 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 19618 break 19619 } 19620 _ = o5.Args[1] 19621 s0 := o5.Args[0] 19622 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 19623 break 19624 } 19625 y0 := s0.Args[0] 
19626 if y0.Op != OpARM64MOVDnop { 19627 break 19628 } 19629 x0 := y0.Args[0] 19630 if x0.Op != OpARM64MOVBUload { 19631 break 19632 } 19633 i7 := x0.AuxInt 19634 s := x0.Aux 19635 mem := x0.Args[1] 19636 p := x0.Args[0] 19637 y1 := o5.Args[1] 19638 if y1.Op != OpARM64MOVDnop { 19639 break 19640 } 19641 x1 := y1.Args[0] 19642 if x1.Op != OpARM64MOVBUload { 19643 break 19644 } 19645 i6 := x1.AuxInt 19646 if x1.Aux != s { 19647 break 19648 } 19649 _ = x1.Args[1] 19650 if p != x1.Args[0] || mem != x1.Args[1] { 19651 break 19652 } 19653 y2 := o4.Args[1] 19654 if y2.Op != OpARM64MOVDnop { 19655 break 19656 } 19657 x2 := y2.Args[0] 19658 if x2.Op != OpARM64MOVBUload { 19659 break 19660 } 19661 i5 := x2.AuxInt 19662 if x2.Aux != s { 19663 break 19664 } 19665 _ = x2.Args[1] 19666 if p != x2.Args[0] || mem != x2.Args[1] { 19667 break 19668 } 19669 y3 := o3.Args[1] 19670 if y3.Op != OpARM64MOVDnop { 19671 break 19672 } 19673 x3 := y3.Args[0] 19674 if x3.Op != OpARM64MOVBUload { 19675 break 19676 } 19677 i4 := x3.AuxInt 19678 if x3.Aux != s { 19679 break 19680 } 19681 _ = x3.Args[1] 19682 if p != x3.Args[0] || mem != x3.Args[1] { 19683 break 19684 } 19685 y4 := o2.Args[1] 19686 if y4.Op != OpARM64MOVDnop { 19687 break 19688 } 19689 x4 := y4.Args[0] 19690 if x4.Op != OpARM64MOVBUload { 19691 break 19692 } 19693 i3 := x4.AuxInt 19694 if x4.Aux != s { 19695 break 19696 } 19697 _ = x4.Args[1] 19698 if p != x4.Args[0] || mem != x4.Args[1] { 19699 break 19700 } 19701 y5 := o1.Args[1] 19702 if y5.Op != OpARM64MOVDnop { 19703 break 19704 } 19705 x5 := y5.Args[0] 19706 if x5.Op != OpARM64MOVBUload { 19707 break 19708 } 19709 i2 := x5.AuxInt 19710 if x5.Aux != s { 19711 break 19712 } 19713 _ = x5.Args[1] 19714 if p != x5.Args[0] || mem != x5.Args[1] { 19715 break 19716 } 19717 y6 := o0.Args[1] 19718 if y6.Op != OpARM64MOVDnop { 19719 break 19720 } 19721 x6 := y6.Args[0] 19722 if x6.Op != OpARM64MOVBUload { 19723 break 19724 } 19725 i1 := x6.AuxInt 19726 if x6.Aux != s { 19727 break 
19728 } 19729 _ = x6.Args[1] 19730 if p != x6.Args[0] || mem != x6.Args[1] { 19731 break 19732 } 19733 y7 := v.Args[1] 19734 if y7.Op != OpARM64MOVDnop { 19735 break 19736 } 19737 x7 := y7.Args[0] 19738 if x7.Op != OpARM64MOVBUload { 19739 break 19740 } 19741 i0 := x7.AuxInt 19742 if x7.Aux != s { 19743 break 19744 } 19745 _ = x7.Args[1] 19746 if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 19747 break 19748 } 19749 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 19750 v0 := b.NewValue0(x7.Pos, OpARM64MOVDload, t) 19751 v.reset(OpCopy) 19752 v.AddArg(v0) 19753 v0.Aux = s 19754 v1 := b.NewValue0(x7.Pos, OpOffPtr, p.Type) 19755 v1.AuxInt = i0 19756 v1.AddArg(p) 19757 v0.AddArg(v1) 19758 v0.AddArg(mem) 19759 return true 19760 } 19761 return false 19762 } 19763 func rewriteValueARM64_OpARM64OR_30(v *Value) bool { 19764 b := v.Block 19765 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) 
y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 19766 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 19767 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 19768 for { 19769 t := v.Type 19770 _ = v.Args[1] 19771 y7 := v.Args[0] 19772 if y7.Op != OpARM64MOVDnop { 19773 break 19774 } 19775 x7 := y7.Args[0] 19776 if x7.Op != OpARM64MOVBUload { 19777 break 19778 } 19779 i0 := x7.AuxInt 19780 s := x7.Aux 19781 mem := x7.Args[1] 19782 p := x7.Args[0] 19783 o0 := v.Args[1] 19784 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19785 break 19786 } 19787 _ = o0.Args[1] 19788 o1 := o0.Args[0] 19789 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19790 break 19791 } 19792 _ = o1.Args[1] 19793 o2 := o1.Args[0] 19794 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 19795 break 19796 } 19797 _ = o2.Args[1] 19798 o3 := o2.Args[0] 19799 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 19800 break 19801 } 19802 _ = o3.Args[1] 19803 o4 := o3.Args[0] 19804 if 
o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 19805 break 19806 } 19807 _ = o4.Args[1] 19808 o5 := o4.Args[0] 19809 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 19810 break 19811 } 19812 _ = o5.Args[1] 19813 s0 := o5.Args[0] 19814 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 19815 break 19816 } 19817 y0 := s0.Args[0] 19818 if y0.Op != OpARM64MOVDnop { 19819 break 19820 } 19821 x0 := y0.Args[0] 19822 if x0.Op != OpARM64MOVBUload { 19823 break 19824 } 19825 i7 := x0.AuxInt 19826 if x0.Aux != s { 19827 break 19828 } 19829 _ = x0.Args[1] 19830 if p != x0.Args[0] || mem != x0.Args[1] { 19831 break 19832 } 19833 y1 := o5.Args[1] 19834 if y1.Op != OpARM64MOVDnop { 19835 break 19836 } 19837 x1 := y1.Args[0] 19838 if x1.Op != OpARM64MOVBUload { 19839 break 19840 } 19841 i6 := x1.AuxInt 19842 if x1.Aux != s { 19843 break 19844 } 19845 _ = x1.Args[1] 19846 if p != x1.Args[0] || mem != x1.Args[1] { 19847 break 19848 } 19849 y2 := o4.Args[1] 19850 if y2.Op != OpARM64MOVDnop { 19851 break 19852 } 19853 x2 := y2.Args[0] 19854 if x2.Op != OpARM64MOVBUload { 19855 break 19856 } 19857 i5 := x2.AuxInt 19858 if x2.Aux != s { 19859 break 19860 } 19861 _ = x2.Args[1] 19862 if p != x2.Args[0] || mem != x2.Args[1] { 19863 break 19864 } 19865 y3 := o3.Args[1] 19866 if y3.Op != OpARM64MOVDnop { 19867 break 19868 } 19869 x3 := y3.Args[0] 19870 if x3.Op != OpARM64MOVBUload { 19871 break 19872 } 19873 i4 := x3.AuxInt 19874 if x3.Aux != s { 19875 break 19876 } 19877 _ = x3.Args[1] 19878 if p != x3.Args[0] || mem != x3.Args[1] { 19879 break 19880 } 19881 y4 := o2.Args[1] 19882 if y4.Op != OpARM64MOVDnop { 19883 break 19884 } 19885 x4 := y4.Args[0] 19886 if x4.Op != OpARM64MOVBUload { 19887 break 19888 } 19889 i3 := x4.AuxInt 19890 if x4.Aux != s { 19891 break 19892 } 19893 _ = x4.Args[1] 19894 if p != x4.Args[0] || mem != x4.Args[1] { 19895 break 19896 } 19897 y5 := o1.Args[1] 19898 if y5.Op != OpARM64MOVDnop { 19899 break 19900 } 19901 x5 := y5.Args[0] 19902 if x5.Op != 
OpARM64MOVBUload { 19903 break 19904 } 19905 i2 := x5.AuxInt 19906 if x5.Aux != s { 19907 break 19908 } 19909 _ = x5.Args[1] 19910 if p != x5.Args[0] || mem != x5.Args[1] { 19911 break 19912 } 19913 y6 := o0.Args[1] 19914 if y6.Op != OpARM64MOVDnop { 19915 break 19916 } 19917 x6 := y6.Args[0] 19918 if x6.Op != OpARM64MOVBUload { 19919 break 19920 } 19921 i1 := x6.AuxInt 19922 if x6.Aux != s { 19923 break 19924 } 19925 _ = x6.Args[1] 19926 if p != x6.Args[0] || mem != x6.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 19927 break 19928 } 19929 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 19930 v0 := b.NewValue0(x6.Pos, OpARM64MOVDload, t) 19931 v.reset(OpCopy) 19932 v.AddArg(v0) 19933 v0.Aux = s 19934 v1 := b.NewValue0(x6.Pos, OpOffPtr, p.Type) 19935 v1.AuxInt = i0 19936 v1.AddArg(p) 19937 v0.AddArg(v1) 19938 v0.AddArg(mem) 19939 return true 19940 } 19941 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop 
x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem))) 19942 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 19943 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 19944 for { 19945 t := v.Type 19946 _ = v.Args[1] 19947 o0 := v.Args[0] 19948 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 19949 break 19950 } 19951 _ = o0.Args[1] 19952 o1 := o0.Args[0] 19953 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 19954 break 19955 } 19956 _ = o1.Args[1] 19957 o2 := o1.Args[0] 19958 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 19959 break 19960 } 19961 _ = o2.Args[1] 19962 o3 := o2.Args[0] 19963 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 19964 break 19965 } 19966 _ = o3.Args[1] 19967 o4 := o3.Args[0] 19968 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 19969 break 19970 } 19971 _ = o4.Args[1] 19972 o5 := o4.Args[0] 19973 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 19974 break 19975 } 19976 _ = o5.Args[1] 19977 s0 := 
o5.Args[0] 19978 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 19979 break 19980 } 19981 y0 := s0.Args[0] 19982 if y0.Op != OpARM64MOVDnop { 19983 break 19984 } 19985 x0 := y0.Args[0] 19986 if x0.Op != OpARM64MOVBUload || x0.AuxInt != 7 { 19987 break 19988 } 19989 s := x0.Aux 19990 mem := x0.Args[1] 19991 p := x0.Args[0] 19992 y1 := o5.Args[1] 19993 if y1.Op != OpARM64MOVDnop { 19994 break 19995 } 19996 x1 := y1.Args[0] 19997 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 6 || x1.Aux != s { 19998 break 19999 } 20000 _ = x1.Args[1] 20001 if p != x1.Args[0] || mem != x1.Args[1] { 20002 break 20003 } 20004 y2 := o4.Args[1] 20005 if y2.Op != OpARM64MOVDnop { 20006 break 20007 } 20008 x2 := y2.Args[0] 20009 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 5 || x2.Aux != s { 20010 break 20011 } 20012 _ = x2.Args[1] 20013 if p != x2.Args[0] || mem != x2.Args[1] { 20014 break 20015 } 20016 y3 := o3.Args[1] 20017 if y3.Op != OpARM64MOVDnop { 20018 break 20019 } 20020 x3 := y3.Args[0] 20021 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 4 || x3.Aux != s { 20022 break 20023 } 20024 _ = x3.Args[1] 20025 if p != x3.Args[0] || mem != x3.Args[1] { 20026 break 20027 } 20028 y4 := o2.Args[1] 20029 if y4.Op != OpARM64MOVDnop { 20030 break 20031 } 20032 x4 := y4.Args[0] 20033 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 3 || x4.Aux != s { 20034 break 20035 } 20036 _ = x4.Args[1] 20037 if p != x4.Args[0] || mem != x4.Args[1] { 20038 break 20039 } 20040 y5 := o1.Args[1] 20041 if y5.Op != OpARM64MOVDnop { 20042 break 20043 } 20044 x5 := y5.Args[0] 20045 if x5.Op != OpARM64MOVBUload || x5.AuxInt != 2 || x5.Aux != s { 20046 break 20047 } 20048 _ = x5.Args[1] 20049 if p != x5.Args[0] || mem != x5.Args[1] { 20050 break 20051 } 20052 y6 := o0.Args[1] 20053 if y6.Op != OpARM64MOVDnop { 20054 break 20055 } 20056 x6 := y6.Args[0] 20057 if x6.Op != OpARM64MOVBUload || x6.AuxInt != 1 || x6.Aux != s { 20058 break 20059 } 20060 _ = x6.Args[1] 20061 p1 := x6.Args[0] 20062 if p1.Op != OpARM64ADD { 20063 
break 20064 } 20065 idx1 := p1.Args[1] 20066 ptr1 := p1.Args[0] 20067 if mem != x6.Args[1] { 20068 break 20069 } 20070 y7 := v.Args[1] 20071 if y7.Op != OpARM64MOVDnop { 20072 break 20073 } 20074 x7 := y7.Args[0] 20075 if x7.Op != OpARM64MOVBUloadidx { 20076 break 20077 } 20078 _ = x7.Args[2] 20079 ptr0 := x7.Args[0] 20080 idx0 := x7.Args[1] 20081 if mem != x7.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 20082 break 20083 } 20084 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 20085 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t) 20086 v.reset(OpCopy) 20087 v.AddArg(v0) 20088 v0.AddArg(ptr0) 20089 v0.AddArg(idx0) 20090 v0.AddArg(mem) 20091 return true 20092 } 20093 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p mem))) 
y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 20094 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 20095 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 20096 for { 20097 t := v.Type 20098 _ = v.Args[1] 20099 y7 := v.Args[0] 20100 if y7.Op != OpARM64MOVDnop { 20101 break 20102 } 20103 x7 := y7.Args[0] 20104 if x7.Op != OpARM64MOVBUloadidx { 20105 break 20106 } 20107 mem := x7.Args[2] 20108 ptr0 := x7.Args[0] 20109 idx0 := x7.Args[1] 20110 o0 := v.Args[1] 20111 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20112 break 20113 } 20114 _ = o0.Args[1] 20115 o1 := o0.Args[0] 20116 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20117 break 20118 } 20119 _ = o1.Args[1] 20120 o2 := o1.Args[0] 20121 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 20122 break 20123 } 20124 _ = o2.Args[1] 20125 o3 := o2.Args[0] 20126 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 20127 break 20128 } 20129 _ = o3.Args[1] 20130 o4 := o3.Args[0] 20131 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 20132 break 20133 } 20134 _ = o4.Args[1] 20135 o5 := o4.Args[0] 20136 if o5.Op != OpARM64ORshiftLL || 
o5.AuxInt != 48 { 20137 break 20138 } 20139 _ = o5.Args[1] 20140 s0 := o5.Args[0] 20141 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 20142 break 20143 } 20144 y0 := s0.Args[0] 20145 if y0.Op != OpARM64MOVDnop { 20146 break 20147 } 20148 x0 := y0.Args[0] 20149 if x0.Op != OpARM64MOVBUload || x0.AuxInt != 7 { 20150 break 20151 } 20152 s := x0.Aux 20153 _ = x0.Args[1] 20154 p := x0.Args[0] 20155 if mem != x0.Args[1] { 20156 break 20157 } 20158 y1 := o5.Args[1] 20159 if y1.Op != OpARM64MOVDnop { 20160 break 20161 } 20162 x1 := y1.Args[0] 20163 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 6 || x1.Aux != s { 20164 break 20165 } 20166 _ = x1.Args[1] 20167 if p != x1.Args[0] || mem != x1.Args[1] { 20168 break 20169 } 20170 y2 := o4.Args[1] 20171 if y2.Op != OpARM64MOVDnop { 20172 break 20173 } 20174 x2 := y2.Args[0] 20175 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 5 || x2.Aux != s { 20176 break 20177 } 20178 _ = x2.Args[1] 20179 if p != x2.Args[0] || mem != x2.Args[1] { 20180 break 20181 } 20182 y3 := o3.Args[1] 20183 if y3.Op != OpARM64MOVDnop { 20184 break 20185 } 20186 x3 := y3.Args[0] 20187 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 4 || x3.Aux != s { 20188 break 20189 } 20190 _ = x3.Args[1] 20191 if p != x3.Args[0] || mem != x3.Args[1] { 20192 break 20193 } 20194 y4 := o2.Args[1] 20195 if y4.Op != OpARM64MOVDnop { 20196 break 20197 } 20198 x4 := y4.Args[0] 20199 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 3 || x4.Aux != s { 20200 break 20201 } 20202 _ = x4.Args[1] 20203 if p != x4.Args[0] || mem != x4.Args[1] { 20204 break 20205 } 20206 y5 := o1.Args[1] 20207 if y5.Op != OpARM64MOVDnop { 20208 break 20209 } 20210 x5 := y5.Args[0] 20211 if x5.Op != OpARM64MOVBUload || x5.AuxInt != 2 || x5.Aux != s { 20212 break 20213 } 20214 _ = x5.Args[1] 20215 if p != x5.Args[0] || mem != x5.Args[1] { 20216 break 20217 } 20218 y6 := o0.Args[1] 20219 if y6.Op != OpARM64MOVDnop { 20220 break 20221 } 20222 x6 := y6.Args[0] 20223 if x6.Op != OpARM64MOVBUload || x6.AuxInt != 1 || 
x6.Aux != s { 20224 break 20225 } 20226 _ = x6.Args[1] 20227 p1 := x6.Args[0] 20228 if p1.Op != OpARM64ADD { 20229 break 20230 } 20231 idx1 := p1.Args[1] 20232 ptr1 := p1.Args[0] 20233 if mem != x6.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 20234 break 20235 } 20236 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 20237 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t) 20238 v.reset(OpCopy) 20239 v.AddArg(v0) 20240 v0.AddArg(ptr0) 20241 v0.AddArg(idx0) 20242 v0.AddArg(mem) 20243 return true 20244 } 20245 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem))) 20246 // 
cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 20247 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 20248 for { 20249 t := v.Type 20250 _ = v.Args[1] 20251 o0 := v.Args[0] 20252 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20253 break 20254 } 20255 _ = o0.Args[1] 20256 o1 := o0.Args[0] 20257 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20258 break 20259 } 20260 _ = o1.Args[1] 20261 o2 := o1.Args[0] 20262 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 20263 break 20264 } 20265 _ = o2.Args[1] 20266 o3 := o2.Args[0] 20267 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 20268 break 20269 } 20270 _ = o3.Args[1] 20271 o4 := o3.Args[0] 20272 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 20273 break 20274 } 20275 _ = o4.Args[1] 20276 o5 := o4.Args[0] 20277 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 20278 break 20279 } 20280 _ = o5.Args[1] 20281 s0 := o5.Args[0] 20282 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 20283 break 20284 } 20285 y0 := s0.Args[0] 20286 if y0.Op != OpARM64MOVDnop { 20287 break 20288 } 20289 x0 := y0.Args[0] 20290 if x0.Op != OpARM64MOVBUloadidx { 20291 break 20292 } 20293 mem := x0.Args[2] 20294 ptr := x0.Args[0] 20295 x0_1 := x0.Args[1] 20296 if x0_1.Op != OpARM64ADDconst || 
x0_1.AuxInt != 7 { 20297 break 20298 } 20299 idx := x0_1.Args[0] 20300 y1 := o5.Args[1] 20301 if y1.Op != OpARM64MOVDnop { 20302 break 20303 } 20304 x1 := y1.Args[0] 20305 if x1.Op != OpARM64MOVBUloadidx { 20306 break 20307 } 20308 _ = x1.Args[2] 20309 if ptr != x1.Args[0] { 20310 break 20311 } 20312 x1_1 := x1.Args[1] 20313 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 6 || idx != x1_1.Args[0] || mem != x1.Args[2] { 20314 break 20315 } 20316 y2 := o4.Args[1] 20317 if y2.Op != OpARM64MOVDnop { 20318 break 20319 } 20320 x2 := y2.Args[0] 20321 if x2.Op != OpARM64MOVBUloadidx { 20322 break 20323 } 20324 _ = x2.Args[2] 20325 if ptr != x2.Args[0] { 20326 break 20327 } 20328 x2_1 := x2.Args[1] 20329 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] { 20330 break 20331 } 20332 y3 := o3.Args[1] 20333 if y3.Op != OpARM64MOVDnop { 20334 break 20335 } 20336 x3 := y3.Args[0] 20337 if x3.Op != OpARM64MOVBUloadidx { 20338 break 20339 } 20340 _ = x3.Args[2] 20341 if ptr != x3.Args[0] { 20342 break 20343 } 20344 x3_1 := x3.Args[1] 20345 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 4 || idx != x3_1.Args[0] || mem != x3.Args[2] { 20346 break 20347 } 20348 y4 := o2.Args[1] 20349 if y4.Op != OpARM64MOVDnop { 20350 break 20351 } 20352 x4 := y4.Args[0] 20353 if x4.Op != OpARM64MOVBUloadidx { 20354 break 20355 } 20356 _ = x4.Args[2] 20357 if ptr != x4.Args[0] { 20358 break 20359 } 20360 x4_1 := x4.Args[1] 20361 if x4_1.Op != OpARM64ADDconst || x4_1.AuxInt != 3 || idx != x4_1.Args[0] || mem != x4.Args[2] { 20362 break 20363 } 20364 y5 := o1.Args[1] 20365 if y5.Op != OpARM64MOVDnop { 20366 break 20367 } 20368 x5 := y5.Args[0] 20369 if x5.Op != OpARM64MOVBUloadidx { 20370 break 20371 } 20372 _ = x5.Args[2] 20373 if ptr != x5.Args[0] { 20374 break 20375 } 20376 x5_1 := x5.Args[1] 20377 if x5_1.Op != OpARM64ADDconst || x5_1.AuxInt != 2 || idx != x5_1.Args[0] || mem != x5.Args[2] { 20378 break 20379 } 20380 y6 := o0.Args[1] 20381 if y6.Op != 
OpARM64MOVDnop { 20382 break 20383 } 20384 x6 := y6.Args[0] 20385 if x6.Op != OpARM64MOVBUloadidx { 20386 break 20387 } 20388 _ = x6.Args[2] 20389 if ptr != x6.Args[0] { 20390 break 20391 } 20392 x6_1 := x6.Args[1] 20393 if x6_1.Op != OpARM64ADDconst || x6_1.AuxInt != 1 || idx != x6_1.Args[0] || mem != x6.Args[2] { 20394 break 20395 } 20396 y7 := v.Args[1] 20397 if y7.Op != OpARM64MOVDnop { 20398 break 20399 } 20400 x7 := y7.Args[0] 20401 if x7.Op != OpARM64MOVBUloadidx { 20402 break 20403 } 20404 _ = x7.Args[2] 20405 if ptr != x7.Args[0] || idx != x7.Args[1] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 20406 break 20407 } 20408 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 20409 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 20410 v.reset(OpCopy) 20411 v.AddArg(v0) 20412 v0.AddArg(ptr) 20413 v0.AddArg(idx) 20414 v0.AddArg(mem) 20415 return true 20416 } 20417 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) 
y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 20418 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 20419 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 20420 for { 20421 t := v.Type 20422 _ = v.Args[1] 20423 y7 := v.Args[0] 20424 if y7.Op != OpARM64MOVDnop { 20425 break 20426 } 20427 x7 := y7.Args[0] 20428 if x7.Op != OpARM64MOVBUloadidx { 20429 break 20430 } 20431 mem := x7.Args[2] 20432 ptr := x7.Args[0] 20433 idx := x7.Args[1] 20434 o0 := v.Args[1] 20435 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20436 break 20437 } 20438 _ = o0.Args[1] 20439 o1 := o0.Args[0] 20440 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20441 break 20442 } 20443 _ = o1.Args[1] 20444 o2 := o1.Args[0] 20445 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 20446 break 20447 } 20448 _ = o2.Args[1] 20449 o3 := o2.Args[0] 20450 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 20451 break 20452 } 20453 _ = o3.Args[1] 20454 o4 := o3.Args[0] 20455 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 20456 break 20457 } 20458 _ = o4.Args[1] 20459 o5 := o4.Args[0] 
20460 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 20461 break 20462 } 20463 _ = o5.Args[1] 20464 s0 := o5.Args[0] 20465 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 20466 break 20467 } 20468 y0 := s0.Args[0] 20469 if y0.Op != OpARM64MOVDnop { 20470 break 20471 } 20472 x0 := y0.Args[0] 20473 if x0.Op != OpARM64MOVBUloadidx { 20474 break 20475 } 20476 _ = x0.Args[2] 20477 if ptr != x0.Args[0] { 20478 break 20479 } 20480 x0_1 := x0.Args[1] 20481 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 7 || idx != x0_1.Args[0] || mem != x0.Args[2] { 20482 break 20483 } 20484 y1 := o5.Args[1] 20485 if y1.Op != OpARM64MOVDnop { 20486 break 20487 } 20488 x1 := y1.Args[0] 20489 if x1.Op != OpARM64MOVBUloadidx { 20490 break 20491 } 20492 _ = x1.Args[2] 20493 if ptr != x1.Args[0] { 20494 break 20495 } 20496 x1_1 := x1.Args[1] 20497 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 6 || idx != x1_1.Args[0] || mem != x1.Args[2] { 20498 break 20499 } 20500 y2 := o4.Args[1] 20501 if y2.Op != OpARM64MOVDnop { 20502 break 20503 } 20504 x2 := y2.Args[0] 20505 if x2.Op != OpARM64MOVBUloadidx { 20506 break 20507 } 20508 _ = x2.Args[2] 20509 if ptr != x2.Args[0] { 20510 break 20511 } 20512 x2_1 := x2.Args[1] 20513 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] { 20514 break 20515 } 20516 y3 := o3.Args[1] 20517 if y3.Op != OpARM64MOVDnop { 20518 break 20519 } 20520 x3 := y3.Args[0] 20521 if x3.Op != OpARM64MOVBUloadidx { 20522 break 20523 } 20524 _ = x3.Args[2] 20525 if ptr != x3.Args[0] { 20526 break 20527 } 20528 x3_1 := x3.Args[1] 20529 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 4 || idx != x3_1.Args[0] || mem != x3.Args[2] { 20530 break 20531 } 20532 y4 := o2.Args[1] 20533 if y4.Op != OpARM64MOVDnop { 20534 break 20535 } 20536 x4 := y4.Args[0] 20537 if x4.Op != OpARM64MOVBUloadidx { 20538 break 20539 } 20540 _ = x4.Args[2] 20541 if ptr != x4.Args[0] { 20542 break 20543 } 20544 x4_1 := x4.Args[1] 20545 if x4_1.Op != OpARM64ADDconst 
|| x4_1.AuxInt != 3 || idx != x4_1.Args[0] || mem != x4.Args[2] { 20546 break 20547 } 20548 y5 := o1.Args[1] 20549 if y5.Op != OpARM64MOVDnop { 20550 break 20551 } 20552 x5 := y5.Args[0] 20553 if x5.Op != OpARM64MOVBUloadidx { 20554 break 20555 } 20556 _ = x5.Args[2] 20557 if ptr != x5.Args[0] { 20558 break 20559 } 20560 x5_1 := x5.Args[1] 20561 if x5_1.Op != OpARM64ADDconst || x5_1.AuxInt != 2 || idx != x5_1.Args[0] || mem != x5.Args[2] { 20562 break 20563 } 20564 y6 := o0.Args[1] 20565 if y6.Op != OpARM64MOVDnop { 20566 break 20567 } 20568 x6 := y6.Args[0] 20569 if x6.Op != OpARM64MOVBUloadidx { 20570 break 20571 } 20572 _ = x6.Args[2] 20573 if ptr != x6.Args[0] { 20574 break 20575 } 20576 x6_1 := x6.Args[1] 20577 if x6_1.Op != OpARM64ADDconst || x6_1.AuxInt != 1 || idx != x6_1.Args[0] || mem != x6.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 20578 break 20579 } 20580 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 20581 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 20582 v.reset(OpCopy) 20583 v.AddArg(v0) 20584 v0.AddArg(ptr) 20585 v0.AddArg(idx) 20586 v0.AddArg(mem) 20587 return true 20588 } 20589 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop 
x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 20590 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 20591 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 20592 for { 20593 t := v.Type 20594 _ = v.Args[1] 20595 o0 := v.Args[0] 20596 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20597 break 20598 } 20599 _ = o0.Args[1] 20600 o1 := o0.Args[0] 20601 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20602 break 20603 } 20604 _ = o1.Args[1] 20605 s0 := o1.Args[0] 20606 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 20607 break 20608 } 20609 y0 := s0.Args[0] 20610 if y0.Op != OpARM64MOVDnop { 20611 break 20612 } 20613 x0 := y0.Args[0] 20614 if x0.Op != OpARM64MOVBUload { 20615 break 20616 } 20617 i0 := x0.AuxInt 20618 s := x0.Aux 20619 mem := x0.Args[1] 20620 p := x0.Args[0] 20621 y1 := o1.Args[1] 20622 if y1.Op != OpARM64MOVDnop { 20623 break 20624 } 20625 x1 := y1.Args[0] 20626 if x1.Op != OpARM64MOVBUload { 20627 break 20628 } 20629 i1 := x1.AuxInt 20630 if x1.Aux != s { 20631 break 20632 } 20633 _ = x1.Args[1] 20634 if p != x1.Args[0] || mem != x1.Args[1] { 20635 break 20636 } 20637 y2 := o0.Args[1] 20638 if y2.Op != OpARM64MOVDnop { 20639 break 20640 } 20641 x2 := y2.Args[0] 20642 if x2.Op != OpARM64MOVBUload { 20643 break 20644 } 20645 i2 := x2.AuxInt 20646 if x2.Aux != s { 20647 break 20648 } 20649 _ = x2.Args[1] 20650 if p != x2.Args[0] || mem != x2.Args[1] { 20651 break 20652 } 20653 y3 := v.Args[1] 20654 if y3.Op != OpARM64MOVDnop { 20655 break 20656 } 20657 x3 := 
y3.Args[0] 20658 if x3.Op != OpARM64MOVBUload { 20659 break 20660 } 20661 i3 := x3.AuxInt 20662 if x3.Aux != s { 20663 break 20664 } 20665 _ = x3.Args[1] 20666 if p != x3.Args[0] || mem != x3.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 20667 break 20668 } 20669 b = mergePoint(b, x0, x1, x2, x3) 20670 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t) 20671 v.reset(OpCopy) 20672 v.AddArg(v0) 20673 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t) 20674 v1.Aux = s 20675 v2 := b.NewValue0(x3.Pos, OpOffPtr, p.Type) 20676 v2.AuxInt = i0 20677 v2.AddArg(p) 20678 v1.AddArg(v2) 20679 v1.AddArg(mem) 20680 v0.AddArg(v1) 20681 return true 20682 } 20683 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 20684 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 20685 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 20686 for { 20687 t := v.Type 20688 _ = v.Args[1] 20689 y3 := v.Args[0] 20690 if y3.Op != OpARM64MOVDnop { 20691 break 20692 } 20693 x3 := y3.Args[0] 20694 if x3.Op != OpARM64MOVBUload 
{ 20695 break 20696 } 20697 i3 := x3.AuxInt 20698 s := x3.Aux 20699 mem := x3.Args[1] 20700 p := x3.Args[0] 20701 o0 := v.Args[1] 20702 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20703 break 20704 } 20705 _ = o0.Args[1] 20706 o1 := o0.Args[0] 20707 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20708 break 20709 } 20710 _ = o1.Args[1] 20711 s0 := o1.Args[0] 20712 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 20713 break 20714 } 20715 y0 := s0.Args[0] 20716 if y0.Op != OpARM64MOVDnop { 20717 break 20718 } 20719 x0 := y0.Args[0] 20720 if x0.Op != OpARM64MOVBUload { 20721 break 20722 } 20723 i0 := x0.AuxInt 20724 if x0.Aux != s { 20725 break 20726 } 20727 _ = x0.Args[1] 20728 if p != x0.Args[0] || mem != x0.Args[1] { 20729 break 20730 } 20731 y1 := o1.Args[1] 20732 if y1.Op != OpARM64MOVDnop { 20733 break 20734 } 20735 x1 := y1.Args[0] 20736 if x1.Op != OpARM64MOVBUload { 20737 break 20738 } 20739 i1 := x1.AuxInt 20740 if x1.Aux != s { 20741 break 20742 } 20743 _ = x1.Args[1] 20744 if p != x1.Args[0] || mem != x1.Args[1] { 20745 break 20746 } 20747 y2 := o0.Args[1] 20748 if y2.Op != OpARM64MOVDnop { 20749 break 20750 } 20751 x2 := y2.Args[0] 20752 if x2.Op != OpARM64MOVBUload { 20753 break 20754 } 20755 i2 := x2.AuxInt 20756 if x2.Aux != s { 20757 break 20758 } 20759 _ = x2.Args[1] 20760 if p != x2.Args[0] || mem != x2.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 20761 break 20762 } 20763 b = mergePoint(b, x0, x1, x2, x3) 20764 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t) 20765 v.reset(OpCopy) 20766 v.AddArg(v0) 20767 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 20768 
v1.Aux = s 20769 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 20770 v2.AuxInt = i0 20771 v2.AddArg(p) 20772 v1.AddArg(v2) 20773 v1.AddArg(mem) 20774 v0.AddArg(v1) 20775 return true 20776 } 20777 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) 20778 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 20779 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 20780 for { 20781 t := v.Type 20782 _ = v.Args[1] 20783 o0 := v.Args[0] 20784 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20785 break 20786 } 20787 _ = o0.Args[1] 20788 o1 := o0.Args[0] 20789 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20790 break 20791 } 20792 _ = o1.Args[1] 20793 s0 := o1.Args[0] 20794 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 20795 break 20796 } 20797 y0 := s0.Args[0] 20798 if y0.Op != OpARM64MOVDnop { 20799 break 20800 } 20801 x0 := y0.Args[0] 20802 if x0.Op != OpARM64MOVBUloadidx { 20803 break 20804 } 20805 mem := x0.Args[2] 20806 ptr0 := x0.Args[0] 20807 idx0 := x0.Args[1] 20808 y1 := o1.Args[1] 20809 if y1.Op != OpARM64MOVDnop { 20810 break 20811 } 20812 x1 := y1.Args[0] 20813 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 { 20814 break 20815 } 20816 s := x1.Aux 20817 _ = x1.Args[1] 20818 p1 := x1.Args[0] 20819 if p1.Op != OpARM64ADD { 20820 break 20821 } 20822 idx1 := 
p1.Args[1] 20823 ptr1 := p1.Args[0] 20824 if mem != x1.Args[1] { 20825 break 20826 } 20827 y2 := o0.Args[1] 20828 if y2.Op != OpARM64MOVDnop { 20829 break 20830 } 20831 x2 := y2.Args[0] 20832 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 2 || x2.Aux != s { 20833 break 20834 } 20835 _ = x2.Args[1] 20836 p := x2.Args[0] 20837 if mem != x2.Args[1] { 20838 break 20839 } 20840 y3 := v.Args[1] 20841 if y3.Op != OpARM64MOVDnop { 20842 break 20843 } 20844 x3 := y3.Args[0] 20845 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 3 || x3.Aux != s { 20846 break 20847 } 20848 _ = x3.Args[1] 20849 if p != x3.Args[0] || mem != x3.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 20850 break 20851 } 20852 b = mergePoint(b, x0, x1, x2, x3) 20853 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t) 20854 v.reset(OpCopy) 20855 v.AddArg(v0) 20856 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUloadidx, t) 20857 v1.AddArg(ptr0) 20858 v1.AddArg(idx0) 20859 v1.AddArg(mem) 20860 v0.AddArg(v1) 20861 return true 20862 } 20863 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem)))) 20864 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && 
isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 20865 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 20866 for { 20867 t := v.Type 20868 _ = v.Args[1] 20869 y3 := v.Args[0] 20870 if y3.Op != OpARM64MOVDnop { 20871 break 20872 } 20873 x3 := y3.Args[0] 20874 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 3 { 20875 break 20876 } 20877 s := x3.Aux 20878 mem := x3.Args[1] 20879 p := x3.Args[0] 20880 o0 := v.Args[1] 20881 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20882 break 20883 } 20884 _ = o0.Args[1] 20885 o1 := o0.Args[0] 20886 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20887 break 20888 } 20889 _ = o1.Args[1] 20890 s0 := o1.Args[0] 20891 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 20892 break 20893 } 20894 y0 := s0.Args[0] 20895 if y0.Op != OpARM64MOVDnop { 20896 break 20897 } 20898 x0 := y0.Args[0] 20899 if x0.Op != OpARM64MOVBUloadidx { 20900 break 20901 } 20902 _ = x0.Args[2] 20903 ptr0 := x0.Args[0] 20904 idx0 := x0.Args[1] 20905 if mem != x0.Args[2] { 20906 break 20907 } 20908 y1 := o1.Args[1] 20909 if y1.Op != OpARM64MOVDnop { 20910 break 20911 } 20912 x1 := y1.Args[0] 20913 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 || x1.Aux != s { 20914 break 20915 } 20916 _ = x1.Args[1] 20917 p1 := x1.Args[0] 20918 if p1.Op != OpARM64ADD { 20919 break 20920 } 20921 idx1 := p1.Args[1] 20922 ptr1 := p1.Args[0] 20923 if mem != x1.Args[1] { 20924 break 20925 } 20926 y2 := o0.Args[1] 20927 if y2.Op != OpARM64MOVDnop { 20928 break 20929 } 20930 x2 := y2.Args[0] 20931 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 2 || x2.Aux != s { 20932 break 20933 } 20934 _ = x2.Args[1] 20935 if p != x2.Args[0] || mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses 
== 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 20936 break 20937 } 20938 b = mergePoint(b, x0, x1, x2, x3) 20939 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t) 20940 v.reset(OpCopy) 20941 v.AddArg(v0) 20942 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 20943 v1.AddArg(ptr0) 20944 v1.AddArg(idx0) 20945 v1.AddArg(mem) 20946 v0.AddArg(v1) 20947 return true 20948 } 20949 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 20950 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 20951 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 20952 for { 20953 t := v.Type 20954 _ = v.Args[1] 20955 o0 := v.Args[0] 20956 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 20957 break 20958 } 20959 _ = o0.Args[1] 20960 o1 := o0.Args[0] 20961 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 20962 break 20963 } 20964 _ = o1.Args[1] 20965 s0 := o1.Args[0] 20966 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 20967 break 20968 } 20969 y0 := s0.Args[0] 20970 if y0.Op != OpARM64MOVDnop { 20971 break 20972 } 20973 x0 := y0.Args[0] 20974 
if x0.Op != OpARM64MOVBUloadidx { 20975 break 20976 } 20977 mem := x0.Args[2] 20978 ptr := x0.Args[0] 20979 idx := x0.Args[1] 20980 y1 := o1.Args[1] 20981 if y1.Op != OpARM64MOVDnop { 20982 break 20983 } 20984 x1 := y1.Args[0] 20985 if x1.Op != OpARM64MOVBUloadidx { 20986 break 20987 } 20988 _ = x1.Args[2] 20989 if ptr != x1.Args[0] { 20990 break 20991 } 20992 x1_1 := x1.Args[1] 20993 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] { 20994 break 20995 } 20996 y2 := o0.Args[1] 20997 if y2.Op != OpARM64MOVDnop { 20998 break 20999 } 21000 x2 := y2.Args[0] 21001 if x2.Op != OpARM64MOVBUloadidx { 21002 break 21003 } 21004 _ = x2.Args[2] 21005 if ptr != x2.Args[0] { 21006 break 21007 } 21008 x2_1 := x2.Args[1] 21009 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] { 21010 break 21011 } 21012 y3 := v.Args[1] 21013 if y3.Op != OpARM64MOVDnop { 21014 break 21015 } 21016 x3 := y3.Args[0] 21017 if x3.Op != OpARM64MOVBUloadidx { 21018 break 21019 } 21020 _ = x3.Args[2] 21021 if ptr != x3.Args[0] { 21022 break 21023 } 21024 x3_1 := x3.Args[1] 21025 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 21026 break 21027 } 21028 b = mergePoint(b, x0, x1, x2, x3) 21029 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 21030 v.reset(OpCopy) 21031 v.AddArg(v0) 21032 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 21033 v1.AddArg(ptr) 21034 v1.AddArg(idx) 21035 v1.AddArg(mem) 21036 v0.AddArg(v1) 21037 return true 21038 } 21039 return false 21040 } 21041 func 
rewriteValueARM64_OpARM64OR_40(v *Value) bool { 21042 b := v.Block 21043 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem)))) 21044 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 21045 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 21046 for { 21047 t := v.Type 21048 _ = v.Args[1] 21049 y3 := v.Args[0] 21050 if y3.Op != OpARM64MOVDnop { 21051 break 21052 } 21053 x3 := y3.Args[0] 21054 if x3.Op != OpARM64MOVBUloadidx { 21055 break 21056 } 21057 mem := x3.Args[2] 21058 ptr := x3.Args[0] 21059 x3_1 := x3.Args[1] 21060 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 3 { 21061 break 21062 } 21063 idx := x3_1.Args[0] 21064 o0 := v.Args[1] 21065 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21066 break 21067 } 21068 _ = o0.Args[1] 21069 o1 := o0.Args[0] 21070 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21071 break 21072 } 21073 _ = o1.Args[1] 21074 s0 := o1.Args[0] 21075 if s0.Op != OpARM64SLLconst || s0.AuxInt != 24 { 21076 break 21077 } 21078 y0 := s0.Args[0] 21079 if y0.Op != OpARM64MOVDnop { 21080 break 21081 } 21082 x0 := y0.Args[0] 21083 if x0.Op != OpARM64MOVBUloadidx { 21084 break 21085 } 21086 _ = x0.Args[2] 21087 if ptr != x0.Args[0] || idx != x0.Args[1] || mem != x0.Args[2] { 21088 break 21089 } 21090 y1 := o1.Args[1] 21091 if y1.Op != OpARM64MOVDnop { 21092 break 21093 } 21094 x1 := y1.Args[0] 21095 if x1.Op != OpARM64MOVBUloadidx { 21096 break 21097 } 
21098 _ = x1.Args[2] 21099 if ptr != x1.Args[0] { 21100 break 21101 } 21102 x1_1 := x1.Args[1] 21103 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] { 21104 break 21105 } 21106 y2 := o0.Args[1] 21107 if y2.Op != OpARM64MOVDnop { 21108 break 21109 } 21110 x2 := y2.Args[0] 21111 if x2.Op != OpARM64MOVBUloadidx { 21112 break 21113 } 21114 _ = x2.Args[2] 21115 if ptr != x2.Args[0] { 21116 break 21117 } 21118 x2_1 := x2.Args[1] 21119 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 21120 break 21121 } 21122 b = mergePoint(b, x0, x1, x2, x3) 21123 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 21124 v.reset(OpCopy) 21125 v.AddArg(v0) 21126 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 21127 v1.AddArg(ptr) 21128 v1.AddArg(idx) 21129 v1.AddArg(mem) 21130 v0.AddArg(v1) 21131 return true 21132 } 21133 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 21134 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 
&& x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21135 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 21136 for { 21137 t := v.Type 21138 _ = v.Args[1] 21139 o0 := v.Args[0] 21140 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21141 break 21142 } 21143 _ = o0.Args[1] 21144 o1 := o0.Args[0] 21145 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21146 break 21147 } 21148 _ = o1.Args[1] 21149 o2 := o1.Args[0] 21150 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 21151 break 21152 } 21153 _ = o2.Args[1] 21154 o3 := o2.Args[0] 21155 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 21156 break 21157 } 21158 _ = o3.Args[1] 21159 o4 := o3.Args[0] 21160 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 21161 break 21162 } 21163 _ = o4.Args[1] 21164 o5 := o4.Args[0] 21165 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 21166 break 21167 } 21168 _ = o5.Args[1] 21169 s0 := o5.Args[0] 21170 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 21171 break 21172 } 21173 y0 := s0.Args[0] 21174 if y0.Op != OpARM64MOVDnop { 21175 break 21176 } 21177 x0 := y0.Args[0] 21178 if x0.Op != OpARM64MOVBUload { 21179 break 21180 } 21181 i0 := x0.AuxInt 21182 s := x0.Aux 21183 mem := x0.Args[1] 21184 p := x0.Args[0] 21185 y1 := o5.Args[1] 21186 if y1.Op != OpARM64MOVDnop { 21187 break 21188 } 21189 x1 := y1.Args[0] 21190 if x1.Op != 
OpARM64MOVBUload { 21191 break 21192 } 21193 i1 := x1.AuxInt 21194 if x1.Aux != s { 21195 break 21196 } 21197 _ = x1.Args[1] 21198 if p != x1.Args[0] || mem != x1.Args[1] { 21199 break 21200 } 21201 y2 := o4.Args[1] 21202 if y2.Op != OpARM64MOVDnop { 21203 break 21204 } 21205 x2 := y2.Args[0] 21206 if x2.Op != OpARM64MOVBUload { 21207 break 21208 } 21209 i2 := x2.AuxInt 21210 if x2.Aux != s { 21211 break 21212 } 21213 _ = x2.Args[1] 21214 if p != x2.Args[0] || mem != x2.Args[1] { 21215 break 21216 } 21217 y3 := o3.Args[1] 21218 if y3.Op != OpARM64MOVDnop { 21219 break 21220 } 21221 x3 := y3.Args[0] 21222 if x3.Op != OpARM64MOVBUload { 21223 break 21224 } 21225 i3 := x3.AuxInt 21226 if x3.Aux != s { 21227 break 21228 } 21229 _ = x3.Args[1] 21230 if p != x3.Args[0] || mem != x3.Args[1] { 21231 break 21232 } 21233 y4 := o2.Args[1] 21234 if y4.Op != OpARM64MOVDnop { 21235 break 21236 } 21237 x4 := y4.Args[0] 21238 if x4.Op != OpARM64MOVBUload { 21239 break 21240 } 21241 i4 := x4.AuxInt 21242 if x4.Aux != s { 21243 break 21244 } 21245 _ = x4.Args[1] 21246 if p != x4.Args[0] || mem != x4.Args[1] { 21247 break 21248 } 21249 y5 := o1.Args[1] 21250 if y5.Op != OpARM64MOVDnop { 21251 break 21252 } 21253 x5 := y5.Args[0] 21254 if x5.Op != OpARM64MOVBUload { 21255 break 21256 } 21257 i5 := x5.AuxInt 21258 if x5.Aux != s { 21259 break 21260 } 21261 _ = x5.Args[1] 21262 if p != x5.Args[0] || mem != x5.Args[1] { 21263 break 21264 } 21265 y6 := o0.Args[1] 21266 if y6.Op != OpARM64MOVDnop { 21267 break 21268 } 21269 x6 := y6.Args[0] 21270 if x6.Op != OpARM64MOVBUload { 21271 break 21272 } 21273 i6 := x6.AuxInt 21274 if x6.Aux != s { 21275 break 21276 } 21277 _ = x6.Args[1] 21278 if p != x6.Args[0] || mem != x6.Args[1] { 21279 break 21280 } 21281 y7 := v.Args[1] 21282 if y7.Op != OpARM64MOVDnop { 21283 break 21284 } 21285 x7 := y7.Args[0] 21286 if x7.Op != OpARM64MOVBUload { 21287 break 21288 } 21289 i7 := x7.AuxInt 21290 if x7.Aux != s { 21291 break 21292 } 21293 _ = x7.Args[1] 
21294 if p != x7.Args[0] || mem != x7.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 21295 break 21296 } 21297 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 21298 v0 := b.NewValue0(x7.Pos, OpARM64REV, t) 21299 v.reset(OpCopy) 21300 v.AddArg(v0) 21301 v1 := b.NewValue0(x7.Pos, OpARM64MOVDload, t) 21302 v1.Aux = s 21303 v2 := b.NewValue0(x7.Pos, OpOffPtr, p.Type) 21304 v2.AuxInt = i0 21305 v2.AddArg(p) 21306 v1.AddArg(v2) 21307 v1.AddArg(mem) 21308 v0.AddArg(v1) 21309 return true 21310 } 21311 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 21312 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && 
x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21313 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 21314 for { 21315 t := v.Type 21316 _ = v.Args[1] 21317 y7 := v.Args[0] 21318 if y7.Op != OpARM64MOVDnop { 21319 break 21320 } 21321 x7 := y7.Args[0] 21322 if x7.Op != OpARM64MOVBUload { 21323 break 21324 } 21325 i7 := x7.AuxInt 21326 s := x7.Aux 21327 mem := x7.Args[1] 21328 p := x7.Args[0] 21329 o0 := v.Args[1] 21330 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21331 break 21332 } 21333 _ = o0.Args[1] 21334 o1 := o0.Args[0] 21335 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21336 break 21337 } 21338 _ = o1.Args[1] 21339 o2 := o1.Args[0] 21340 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 21341 break 21342 } 21343 _ = o2.Args[1] 21344 o3 := o2.Args[0] 21345 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 21346 break 21347 } 21348 _ = o3.Args[1] 21349 o4 := o3.Args[0] 21350 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 21351 break 21352 } 21353 _ = o4.Args[1] 21354 o5 := o4.Args[0] 21355 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 21356 break 21357 } 21358 _ = o5.Args[1] 21359 s0 := o5.Args[0] 21360 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 21361 break 21362 } 21363 y0 := s0.Args[0] 21364 if y0.Op != OpARM64MOVDnop { 21365 
break 21366 } 21367 x0 := y0.Args[0] 21368 if x0.Op != OpARM64MOVBUload { 21369 break 21370 } 21371 i0 := x0.AuxInt 21372 if x0.Aux != s { 21373 break 21374 } 21375 _ = x0.Args[1] 21376 if p != x0.Args[0] || mem != x0.Args[1] { 21377 break 21378 } 21379 y1 := o5.Args[1] 21380 if y1.Op != OpARM64MOVDnop { 21381 break 21382 } 21383 x1 := y1.Args[0] 21384 if x1.Op != OpARM64MOVBUload { 21385 break 21386 } 21387 i1 := x1.AuxInt 21388 if x1.Aux != s { 21389 break 21390 } 21391 _ = x1.Args[1] 21392 if p != x1.Args[0] || mem != x1.Args[1] { 21393 break 21394 } 21395 y2 := o4.Args[1] 21396 if y2.Op != OpARM64MOVDnop { 21397 break 21398 } 21399 x2 := y2.Args[0] 21400 if x2.Op != OpARM64MOVBUload { 21401 break 21402 } 21403 i2 := x2.AuxInt 21404 if x2.Aux != s { 21405 break 21406 } 21407 _ = x2.Args[1] 21408 if p != x2.Args[0] || mem != x2.Args[1] { 21409 break 21410 } 21411 y3 := o3.Args[1] 21412 if y3.Op != OpARM64MOVDnop { 21413 break 21414 } 21415 x3 := y3.Args[0] 21416 if x3.Op != OpARM64MOVBUload { 21417 break 21418 } 21419 i3 := x3.AuxInt 21420 if x3.Aux != s { 21421 break 21422 } 21423 _ = x3.Args[1] 21424 if p != x3.Args[0] || mem != x3.Args[1] { 21425 break 21426 } 21427 y4 := o2.Args[1] 21428 if y4.Op != OpARM64MOVDnop { 21429 break 21430 } 21431 x4 := y4.Args[0] 21432 if x4.Op != OpARM64MOVBUload { 21433 break 21434 } 21435 i4 := x4.AuxInt 21436 if x4.Aux != s { 21437 break 21438 } 21439 _ = x4.Args[1] 21440 if p != x4.Args[0] || mem != x4.Args[1] { 21441 break 21442 } 21443 y5 := o1.Args[1] 21444 if y5.Op != OpARM64MOVDnop { 21445 break 21446 } 21447 x5 := y5.Args[0] 21448 if x5.Op != OpARM64MOVBUload { 21449 break 21450 } 21451 i5 := x5.AuxInt 21452 if x5.Aux != s { 21453 break 21454 } 21455 _ = x5.Args[1] 21456 if p != x5.Args[0] || mem != x5.Args[1] { 21457 break 21458 } 21459 y6 := o0.Args[1] 21460 if y6.Op != OpARM64MOVDnop { 21461 break 21462 } 21463 x6 := y6.Args[0] 21464 if x6.Op != OpARM64MOVBUload { 21465 break 21466 } 21467 i6 := x6.AuxInt 21468 if 
x6.Aux != s { 21469 break 21470 } 21471 _ = x6.Args[1] 21472 if p != x6.Args[0] || mem != x6.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 21473 break 21474 } 21475 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 21476 v0 := b.NewValue0(x6.Pos, OpARM64REV, t) 21477 v.reset(OpCopy) 21478 v.AddArg(v0) 21479 v1 := b.NewValue0(x6.Pos, OpARM64MOVDload, t) 21480 v1.Aux = s 21481 v2 := b.NewValue0(x6.Pos, OpOffPtr, p.Type) 21482 v2.AuxInt = i0 21483 v2.AddArg(p) 21484 v1.AddArg(v2) 21485 v1.AddArg(mem) 21486 v0.AddArg(v1) 21487 return true 21488 } 21489 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [7] {s} p mem))) 21490 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && 
x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21491 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 21492 for { 21493 t := v.Type 21494 _ = v.Args[1] 21495 o0 := v.Args[0] 21496 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21497 break 21498 } 21499 _ = o0.Args[1] 21500 o1 := o0.Args[0] 21501 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21502 break 21503 } 21504 _ = o1.Args[1] 21505 o2 := o1.Args[0] 21506 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 21507 break 21508 } 21509 _ = o2.Args[1] 21510 o3 := o2.Args[0] 21511 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 21512 break 21513 } 21514 _ = o3.Args[1] 21515 o4 := o3.Args[0] 21516 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 21517 break 21518 } 21519 _ = o4.Args[1] 21520 o5 := o4.Args[0] 21521 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 21522 break 21523 } 21524 _ = o5.Args[1] 21525 s0 := o5.Args[0] 21526 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 21527 break 21528 } 21529 y0 := s0.Args[0] 21530 if y0.Op != OpARM64MOVDnop { 21531 break 21532 } 21533 x0 := y0.Args[0] 21534 if x0.Op != OpARM64MOVBUloadidx { 21535 break 21536 } 21537 mem := x0.Args[2] 21538 ptr0 := x0.Args[0] 21539 
idx0 := x0.Args[1] 21540 y1 := o5.Args[1] 21541 if y1.Op != OpARM64MOVDnop { 21542 break 21543 } 21544 x1 := y1.Args[0] 21545 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 { 21546 break 21547 } 21548 s := x1.Aux 21549 _ = x1.Args[1] 21550 p1 := x1.Args[0] 21551 if p1.Op != OpARM64ADD { 21552 break 21553 } 21554 idx1 := p1.Args[1] 21555 ptr1 := p1.Args[0] 21556 if mem != x1.Args[1] { 21557 break 21558 } 21559 y2 := o4.Args[1] 21560 if y2.Op != OpARM64MOVDnop { 21561 break 21562 } 21563 x2 := y2.Args[0] 21564 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 2 || x2.Aux != s { 21565 break 21566 } 21567 _ = x2.Args[1] 21568 p := x2.Args[0] 21569 if mem != x2.Args[1] { 21570 break 21571 } 21572 y3 := o3.Args[1] 21573 if y3.Op != OpARM64MOVDnop { 21574 break 21575 } 21576 x3 := y3.Args[0] 21577 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 3 || x3.Aux != s { 21578 break 21579 } 21580 _ = x3.Args[1] 21581 if p != x3.Args[0] || mem != x3.Args[1] { 21582 break 21583 } 21584 y4 := o2.Args[1] 21585 if y4.Op != OpARM64MOVDnop { 21586 break 21587 } 21588 x4 := y4.Args[0] 21589 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 4 || x4.Aux != s { 21590 break 21591 } 21592 _ = x4.Args[1] 21593 if p != x4.Args[0] || mem != x4.Args[1] { 21594 break 21595 } 21596 y5 := o1.Args[1] 21597 if y5.Op != OpARM64MOVDnop { 21598 break 21599 } 21600 x5 := y5.Args[0] 21601 if x5.Op != OpARM64MOVBUload || x5.AuxInt != 5 || x5.Aux != s { 21602 break 21603 } 21604 _ = x5.Args[1] 21605 if p != x5.Args[0] || mem != x5.Args[1] { 21606 break 21607 } 21608 y6 := o0.Args[1] 21609 if y6.Op != OpARM64MOVDnop { 21610 break 21611 } 21612 x6 := y6.Args[0] 21613 if x6.Op != OpARM64MOVBUload || x6.AuxInt != 6 || x6.Aux != s { 21614 break 21615 } 21616 _ = x6.Args[1] 21617 if p != x6.Args[0] || mem != x6.Args[1] { 21618 break 21619 } 21620 y7 := v.Args[1] 21621 if y7.Op != OpARM64MOVDnop { 21622 break 21623 } 21624 x7 := y7.Args[0] 21625 if x7.Op != OpARM64MOVBUload || x7.AuxInt != 7 || x7.Aux != s { 21626 break 21627 } 
21628 _ = x7.Args[1] 21629 if p != x7.Args[0] || mem != x7.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 21630 break 21631 } 21632 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 21633 v0 := b.NewValue0(x7.Pos, OpARM64REV, t) 21634 v.reset(OpCopy) 21635 v.AddArg(v0) 21636 v1 := b.NewValue0(x7.Pos, OpARM64MOVDloadidx, t) 21637 v1.AddArg(ptr0) 21638 v1.AddArg(idx0) 21639 v1.AddArg(mem) 21640 v0.AddArg(v1) 21641 return true 21642 } 21643 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem)))) 21644 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && 
y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21645 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 21646 for { 21647 t := v.Type 21648 _ = v.Args[1] 21649 y7 := v.Args[0] 21650 if y7.Op != OpARM64MOVDnop { 21651 break 21652 } 21653 x7 := y7.Args[0] 21654 if x7.Op != OpARM64MOVBUload || x7.AuxInt != 7 { 21655 break 21656 } 21657 s := x7.Aux 21658 mem := x7.Args[1] 21659 p := x7.Args[0] 21660 o0 := v.Args[1] 21661 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21662 break 21663 } 21664 _ = o0.Args[1] 21665 o1 := o0.Args[0] 21666 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21667 break 21668 } 21669 _ = o1.Args[1] 21670 o2 := o1.Args[0] 21671 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 21672 break 21673 } 21674 _ = o2.Args[1] 21675 o3 := o2.Args[0] 21676 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 21677 break 21678 } 21679 _ = o3.Args[1] 21680 o4 := o3.Args[0] 21681 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 21682 break 21683 } 21684 _ = o4.Args[1] 21685 o5 := o4.Args[0] 21686 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 21687 break 21688 } 21689 _ = o5.Args[1] 21690 s0 := o5.Args[0] 21691 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 21692 break 21693 } 21694 y0 := s0.Args[0] 21695 if y0.Op != OpARM64MOVDnop { 
21696 break 21697 } 21698 x0 := y0.Args[0] 21699 if x0.Op != OpARM64MOVBUloadidx { 21700 break 21701 } 21702 _ = x0.Args[2] 21703 ptr0 := x0.Args[0] 21704 idx0 := x0.Args[1] 21705 if mem != x0.Args[2] { 21706 break 21707 } 21708 y1 := o5.Args[1] 21709 if y1.Op != OpARM64MOVDnop { 21710 break 21711 } 21712 x1 := y1.Args[0] 21713 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 || x1.Aux != s { 21714 break 21715 } 21716 _ = x1.Args[1] 21717 p1 := x1.Args[0] 21718 if p1.Op != OpARM64ADD { 21719 break 21720 } 21721 idx1 := p1.Args[1] 21722 ptr1 := p1.Args[0] 21723 if mem != x1.Args[1] { 21724 break 21725 } 21726 y2 := o4.Args[1] 21727 if y2.Op != OpARM64MOVDnop { 21728 break 21729 } 21730 x2 := y2.Args[0] 21731 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 2 || x2.Aux != s { 21732 break 21733 } 21734 _ = x2.Args[1] 21735 if p != x2.Args[0] || mem != x2.Args[1] { 21736 break 21737 } 21738 y3 := o3.Args[1] 21739 if y3.Op != OpARM64MOVDnop { 21740 break 21741 } 21742 x3 := y3.Args[0] 21743 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 3 || x3.Aux != s { 21744 break 21745 } 21746 _ = x3.Args[1] 21747 if p != x3.Args[0] || mem != x3.Args[1] { 21748 break 21749 } 21750 y4 := o2.Args[1] 21751 if y4.Op != OpARM64MOVDnop { 21752 break 21753 } 21754 x4 := y4.Args[0] 21755 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 4 || x4.Aux != s { 21756 break 21757 } 21758 _ = x4.Args[1] 21759 if p != x4.Args[0] || mem != x4.Args[1] { 21760 break 21761 } 21762 y5 := o1.Args[1] 21763 if y5.Op != OpARM64MOVDnop { 21764 break 21765 } 21766 x5 := y5.Args[0] 21767 if x5.Op != OpARM64MOVBUload || x5.AuxInt != 5 || x5.Aux != s { 21768 break 21769 } 21770 _ = x5.Args[1] 21771 if p != x5.Args[0] || mem != x5.Args[1] { 21772 break 21773 } 21774 y6 := o0.Args[1] 21775 if y6.Op != OpARM64MOVDnop { 21776 break 21777 } 21778 x6 := y6.Args[0] 21779 if x6.Op != OpARM64MOVBUload || x6.AuxInt != 6 || x6.Aux != s { 21780 break 21781 } 21782 _ = x6.Args[1] 21783 if p != x6.Args[0] || mem != x6.Args[1] || !(s == nil 
&& x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 21784 break 21785 } 21786 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 21787 v0 := b.NewValue0(x6.Pos, OpARM64REV, t) 21788 v.reset(OpCopy) 21789 v.AddArg(v0) 21790 v1 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t) 21791 v1.AddArg(ptr0) 21792 v1.AddArg(idx0) 21793 v1.AddArg(mem) 21794 v0.AddArg(v1) 21795 return true 21796 } 21797 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 21798 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && 
y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21799 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 21800 for { 21801 t := v.Type 21802 _ = v.Args[1] 21803 o0 := v.Args[0] 21804 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21805 break 21806 } 21807 _ = o0.Args[1] 21808 o1 := o0.Args[0] 21809 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21810 break 21811 } 21812 _ = o1.Args[1] 21813 o2 := o1.Args[0] 21814 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 21815 break 21816 } 21817 _ = o2.Args[1] 21818 o3 := o2.Args[0] 21819 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 21820 break 21821 } 21822 _ = o3.Args[1] 21823 o4 := o3.Args[0] 21824 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 21825 break 21826 } 21827 _ = o4.Args[1] 21828 o5 := o4.Args[0] 21829 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 21830 break 21831 } 21832 _ = o5.Args[1] 21833 s0 := o5.Args[0] 21834 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 21835 break 21836 } 21837 y0 := s0.Args[0] 21838 if y0.Op != OpARM64MOVDnop { 21839 break 21840 } 21841 x0 := y0.Args[0] 21842 if x0.Op != OpARM64MOVBUloadidx { 21843 break 21844 } 21845 mem := x0.Args[2] 21846 ptr := x0.Args[0] 21847 idx := x0.Args[1] 21848 y1 := o5.Args[1] 21849 if y1.Op != OpARM64MOVDnop { 21850 break 21851 } 21852 x1 := y1.Args[0] 21853 if x1.Op != OpARM64MOVBUloadidx { 21854 break 21855 } 21856 _ = x1.Args[2] 21857 if 
ptr != x1.Args[0] { 21858 break 21859 } 21860 x1_1 := x1.Args[1] 21861 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] { 21862 break 21863 } 21864 y2 := o4.Args[1] 21865 if y2.Op != OpARM64MOVDnop { 21866 break 21867 } 21868 x2 := y2.Args[0] 21869 if x2.Op != OpARM64MOVBUloadidx { 21870 break 21871 } 21872 _ = x2.Args[2] 21873 if ptr != x2.Args[0] { 21874 break 21875 } 21876 x2_1 := x2.Args[1] 21877 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] { 21878 break 21879 } 21880 y3 := o3.Args[1] 21881 if y3.Op != OpARM64MOVDnop { 21882 break 21883 } 21884 x3 := y3.Args[0] 21885 if x3.Op != OpARM64MOVBUloadidx { 21886 break 21887 } 21888 _ = x3.Args[2] 21889 if ptr != x3.Args[0] { 21890 break 21891 } 21892 x3_1 := x3.Args[1] 21893 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] { 21894 break 21895 } 21896 y4 := o2.Args[1] 21897 if y4.Op != OpARM64MOVDnop { 21898 break 21899 } 21900 x4 := y4.Args[0] 21901 if x4.Op != OpARM64MOVBUloadidx { 21902 break 21903 } 21904 _ = x4.Args[2] 21905 if ptr != x4.Args[0] { 21906 break 21907 } 21908 x4_1 := x4.Args[1] 21909 if x4_1.Op != OpARM64ADDconst || x4_1.AuxInt != 4 || idx != x4_1.Args[0] || mem != x4.Args[2] { 21910 break 21911 } 21912 y5 := o1.Args[1] 21913 if y5.Op != OpARM64MOVDnop { 21914 break 21915 } 21916 x5 := y5.Args[0] 21917 if x5.Op != OpARM64MOVBUloadidx { 21918 break 21919 } 21920 _ = x5.Args[2] 21921 if ptr != x5.Args[0] { 21922 break 21923 } 21924 x5_1 := x5.Args[1] 21925 if x5_1.Op != OpARM64ADDconst || x5_1.AuxInt != 5 || idx != x5_1.Args[0] || mem != x5.Args[2] { 21926 break 21927 } 21928 y6 := o0.Args[1] 21929 if y6.Op != OpARM64MOVDnop { 21930 break 21931 } 21932 x6 := y6.Args[0] 21933 if x6.Op != OpARM64MOVBUloadidx { 21934 break 21935 } 21936 _ = x6.Args[2] 21937 if ptr != x6.Args[0] { 21938 break 21939 } 21940 x6_1 := x6.Args[1] 21941 if x6_1.Op != OpARM64ADDconst || 
x6_1.AuxInt != 6 || idx != x6_1.Args[0] || mem != x6.Args[2] { 21942 break 21943 } 21944 y7 := v.Args[1] 21945 if y7.Op != OpARM64MOVDnop { 21946 break 21947 } 21948 x7 := y7.Args[0] 21949 if x7.Op != OpARM64MOVBUloadidx { 21950 break 21951 } 21952 _ = x7.Args[2] 21953 if ptr != x7.Args[0] { 21954 break 21955 } 21956 x7_1 := x7.Args[1] 21957 if x7_1.Op != OpARM64ADDconst || x7_1.AuxInt != 7 || idx != x7_1.Args[0] || mem != x7.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 21958 break 21959 } 21960 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 21961 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 21962 v.reset(OpCopy) 21963 v.AddArg(v0) 21964 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 21965 v1.AddArg(ptr) 21966 v1.AddArg(idx) 21967 v1.AddArg(mem) 21968 v0.AddArg(v1) 21969 return true 21970 } 21971 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop 
x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem)))) 21972 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 21973 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 21974 for { 21975 t := v.Type 21976 _ = v.Args[1] 21977 y7 := v.Args[0] 21978 if y7.Op != OpARM64MOVDnop { 21979 break 21980 } 21981 x7 := y7.Args[0] 21982 if x7.Op != OpARM64MOVBUloadidx { 21983 break 21984 } 21985 mem := x7.Args[2] 21986 ptr := x7.Args[0] 21987 x7_1 := x7.Args[1] 21988 if x7_1.Op != OpARM64ADDconst || x7_1.AuxInt != 7 { 21989 break 21990 } 21991 idx := x7_1.Args[0] 21992 o0 := v.Args[1] 21993 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 8 { 21994 break 21995 } 21996 _ = o0.Args[1] 21997 o1 := o0.Args[0] 21998 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 16 { 21999 break 22000 } 22001 _ = o1.Args[1] 22002 o2 := o1.Args[0] 22003 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 24 { 22004 break 22005 } 22006 _ = o2.Args[1] 22007 o3 := o2.Args[0] 22008 if o3.Op != OpARM64ORshiftLL || o3.AuxInt != 32 { 22009 break 22010 } 22011 _ = o3.Args[1] 22012 o4 := o3.Args[0] 22013 if o4.Op != OpARM64ORshiftLL || o4.AuxInt != 40 { 22014 break 22015 } 
22016 _ = o4.Args[1] 22017 o5 := o4.Args[0] 22018 if o5.Op != OpARM64ORshiftLL || o5.AuxInt != 48 { 22019 break 22020 } 22021 _ = o5.Args[1] 22022 s0 := o5.Args[0] 22023 if s0.Op != OpARM64SLLconst || s0.AuxInt != 56 { 22024 break 22025 } 22026 y0 := s0.Args[0] 22027 if y0.Op != OpARM64MOVDnop { 22028 break 22029 } 22030 x0 := y0.Args[0] 22031 if x0.Op != OpARM64MOVBUloadidx { 22032 break 22033 } 22034 _ = x0.Args[2] 22035 if ptr != x0.Args[0] || idx != x0.Args[1] || mem != x0.Args[2] { 22036 break 22037 } 22038 y1 := o5.Args[1] 22039 if y1.Op != OpARM64MOVDnop { 22040 break 22041 } 22042 x1 := y1.Args[0] 22043 if x1.Op != OpARM64MOVBUloadidx { 22044 break 22045 } 22046 _ = x1.Args[2] 22047 if ptr != x1.Args[0] { 22048 break 22049 } 22050 x1_1 := x1.Args[1] 22051 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] { 22052 break 22053 } 22054 y2 := o4.Args[1] 22055 if y2.Op != OpARM64MOVDnop { 22056 break 22057 } 22058 x2 := y2.Args[0] 22059 if x2.Op != OpARM64MOVBUloadidx { 22060 break 22061 } 22062 _ = x2.Args[2] 22063 if ptr != x2.Args[0] { 22064 break 22065 } 22066 x2_1 := x2.Args[1] 22067 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] { 22068 break 22069 } 22070 y3 := o3.Args[1] 22071 if y3.Op != OpARM64MOVDnop { 22072 break 22073 } 22074 x3 := y3.Args[0] 22075 if x3.Op != OpARM64MOVBUloadidx { 22076 break 22077 } 22078 _ = x3.Args[2] 22079 if ptr != x3.Args[0] { 22080 break 22081 } 22082 x3_1 := x3.Args[1] 22083 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 3 || idx != x3_1.Args[0] || mem != x3.Args[2] { 22084 break 22085 } 22086 y4 := o2.Args[1] 22087 if y4.Op != OpARM64MOVDnop { 22088 break 22089 } 22090 x4 := y4.Args[0] 22091 if x4.Op != OpARM64MOVBUloadidx { 22092 break 22093 } 22094 _ = x4.Args[2] 22095 if ptr != x4.Args[0] { 22096 break 22097 } 22098 x4_1 := x4.Args[1] 22099 if x4_1.Op != OpARM64ADDconst || x4_1.AuxInt != 4 || idx != x4_1.Args[0] || mem != 
x4.Args[2] {
			break
		}
		y5 := o1.Args[1]
		if y5.Op != OpARM64MOVDnop {
			break
		}
		x5 := y5.Args[0]
		if x5.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x5.Args[2]
		if ptr != x5.Args[0] {
			break
		}
		x5_1 := x5.Args[1]
		if x5_1.Op != OpARM64ADDconst || x5_1.AuxInt != 5 || idx != x5_1.Args[0] || mem != x5.Args[2] {
			break
		}
		y6 := o0.Args[1]
		if y6.Op != OpARM64MOVDnop {
			break
		}
		x6 := y6.Args[0]
		if x6.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x6.Args[2]
		if ptr != x6.Args[0] {
			break
		}
		x6_1 := x6.Args[1]
		if x6_1.Op != OpARM64ADDconst || x6_1.AuxInt != 6 || idx != x6_1.Args[0] || mem != x6.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) {
			break
		}
		b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7)
		v0 := b.NewValue0(v.Pos, OpARM64REV, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
		v1.AddArg(ptr)
		v1.AddArg(idx)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64ORN_0(v *Value) bool {
	// match: (ORN x (MOVDconst [c]))
	// result: (ORconst [^c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (ORN x x)
	// result: (MOVDconst [-1])
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ORN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ORN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64ORNshiftLL_0(v *Value) bool {
	// match: (ORNshiftLL x (MOVDconst [c]) [d])
(MOVDconst [c]) [d]) 22241 // result: (ORconst x [^int64(uint64(c)<<uint64(d))]) 22242 for { 22243 d := v.AuxInt 22244 _ = v.Args[1] 22245 x := v.Args[0] 22246 v_1 := v.Args[1] 22247 if v_1.Op != OpARM64MOVDconst { 22248 break 22249 } 22250 c := v_1.AuxInt 22251 v.reset(OpARM64ORconst) 22252 v.AuxInt = ^int64(uint64(c) << uint64(d)) 22253 v.AddArg(x) 22254 return true 22255 } 22256 // match: (ORNshiftLL x (SLLconst x [c]) [d]) 22257 // cond: c==d 22258 // result: (MOVDconst [-1]) 22259 for { 22260 d := v.AuxInt 22261 _ = v.Args[1] 22262 x := v.Args[0] 22263 v_1 := v.Args[1] 22264 if v_1.Op != OpARM64SLLconst { 22265 break 22266 } 22267 c := v_1.AuxInt 22268 if x != v_1.Args[0] || !(c == d) { 22269 break 22270 } 22271 v.reset(OpARM64MOVDconst) 22272 v.AuxInt = -1 22273 return true 22274 } 22275 return false 22276 } 22277 func rewriteValueARM64_OpARM64ORNshiftRA_0(v *Value) bool { 22278 // match: (ORNshiftRA x (MOVDconst [c]) [d]) 22279 // result: (ORconst x [^(c>>uint64(d))]) 22280 for { 22281 d := v.AuxInt 22282 _ = v.Args[1] 22283 x := v.Args[0] 22284 v_1 := v.Args[1] 22285 if v_1.Op != OpARM64MOVDconst { 22286 break 22287 } 22288 c := v_1.AuxInt 22289 v.reset(OpARM64ORconst) 22290 v.AuxInt = ^(c >> uint64(d)) 22291 v.AddArg(x) 22292 return true 22293 } 22294 // match: (ORNshiftRA x (SRAconst x [c]) [d]) 22295 // cond: c==d 22296 // result: (MOVDconst [-1]) 22297 for { 22298 d := v.AuxInt 22299 _ = v.Args[1] 22300 x := v.Args[0] 22301 v_1 := v.Args[1] 22302 if v_1.Op != OpARM64SRAconst { 22303 break 22304 } 22305 c := v_1.AuxInt 22306 if x != v_1.Args[0] || !(c == d) { 22307 break 22308 } 22309 v.reset(OpARM64MOVDconst) 22310 v.AuxInt = -1 22311 return true 22312 } 22313 return false 22314 } 22315 func rewriteValueARM64_OpARM64ORNshiftRL_0(v *Value) bool { 22316 // match: (ORNshiftRL x (MOVDconst [c]) [d]) 22317 // result: (ORconst x [^int64(uint64(c)>>uint64(d))]) 22318 for { 22319 d := v.AuxInt 22320 _ = v.Args[1] 22321 x := v.Args[0] 22322 v_1 := v.Args[1] 
22323 if v_1.Op != OpARM64MOVDconst { 22324 break 22325 } 22326 c := v_1.AuxInt 22327 v.reset(OpARM64ORconst) 22328 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 22329 v.AddArg(x) 22330 return true 22331 } 22332 // match: (ORNshiftRL x (SRLconst x [c]) [d]) 22333 // cond: c==d 22334 // result: (MOVDconst [-1]) 22335 for { 22336 d := v.AuxInt 22337 _ = v.Args[1] 22338 x := v.Args[0] 22339 v_1 := v.Args[1] 22340 if v_1.Op != OpARM64SRLconst { 22341 break 22342 } 22343 c := v_1.AuxInt 22344 if x != v_1.Args[0] || !(c == d) { 22345 break 22346 } 22347 v.reset(OpARM64MOVDconst) 22348 v.AuxInt = -1 22349 return true 22350 } 22351 return false 22352 } 22353 func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool { 22354 // match: (ORconst [0] x) 22355 // result: x 22356 for { 22357 if v.AuxInt != 0 { 22358 break 22359 } 22360 x := v.Args[0] 22361 v.reset(OpCopy) 22362 v.Type = x.Type 22363 v.AddArg(x) 22364 return true 22365 } 22366 // match: (ORconst [-1] _) 22367 // result: (MOVDconst [-1]) 22368 for { 22369 if v.AuxInt != -1 { 22370 break 22371 } 22372 v.reset(OpARM64MOVDconst) 22373 v.AuxInt = -1 22374 return true 22375 } 22376 // match: (ORconst [c] (MOVDconst [d])) 22377 // result: (MOVDconst [c|d]) 22378 for { 22379 c := v.AuxInt 22380 v_0 := v.Args[0] 22381 if v_0.Op != OpARM64MOVDconst { 22382 break 22383 } 22384 d := v_0.AuxInt 22385 v.reset(OpARM64MOVDconst) 22386 v.AuxInt = c | d 22387 return true 22388 } 22389 // match: (ORconst [c] (ORconst [d] x)) 22390 // result: (ORconst [c|d] x) 22391 for { 22392 c := v.AuxInt 22393 v_0 := v.Args[0] 22394 if v_0.Op != OpARM64ORconst { 22395 break 22396 } 22397 d := v_0.AuxInt 22398 x := v_0.Args[0] 22399 v.reset(OpARM64ORconst) 22400 v.AuxInt = c | d 22401 v.AddArg(x) 22402 return true 22403 } 22404 // match: (ORconst [c1] (ANDconst [c2] x)) 22405 // cond: c2|c1 == ^0 22406 // result: (ORconst [c1] x) 22407 for { 22408 c1 := v.AuxInt 22409 v_0 := v.Args[0] 22410 if v_0.Op != OpARM64ANDconst { 22411 break 22412 } 22413 c2 := 
v_0.AuxInt 22414 x := v_0.Args[0] 22415 if !(c2|c1 == ^0) { 22416 break 22417 } 22418 v.reset(OpARM64ORconst) 22419 v.AuxInt = c1 22420 v.AddArg(x) 22421 return true 22422 } 22423 return false 22424 } 22425 func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool { 22426 b := v.Block 22427 typ := &b.Func.Config.Types 22428 // match: (ORshiftLL (MOVDconst [c]) x [d]) 22429 // result: (ORconst [c] (SLLconst <x.Type> x [d])) 22430 for { 22431 d := v.AuxInt 22432 x := v.Args[1] 22433 v_0 := v.Args[0] 22434 if v_0.Op != OpARM64MOVDconst { 22435 break 22436 } 22437 c := v_0.AuxInt 22438 v.reset(OpARM64ORconst) 22439 v.AuxInt = c 22440 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 22441 v0.AuxInt = d 22442 v0.AddArg(x) 22443 v.AddArg(v0) 22444 return true 22445 } 22446 // match: (ORshiftLL x (MOVDconst [c]) [d]) 22447 // result: (ORconst x [int64(uint64(c)<<uint64(d))]) 22448 for { 22449 d := v.AuxInt 22450 _ = v.Args[1] 22451 x := v.Args[0] 22452 v_1 := v.Args[1] 22453 if v_1.Op != OpARM64MOVDconst { 22454 break 22455 } 22456 c := v_1.AuxInt 22457 v.reset(OpARM64ORconst) 22458 v.AuxInt = int64(uint64(c) << uint64(d)) 22459 v.AddArg(x) 22460 return true 22461 } 22462 // match: (ORshiftLL x y:(SLLconst x [c]) [d]) 22463 // cond: c==d 22464 // result: y 22465 for { 22466 d := v.AuxInt 22467 _ = v.Args[1] 22468 x := v.Args[0] 22469 y := v.Args[1] 22470 if y.Op != OpARM64SLLconst { 22471 break 22472 } 22473 c := y.AuxInt 22474 if x != y.Args[0] || !(c == d) { 22475 break 22476 } 22477 v.reset(OpCopy) 22478 v.Type = y.Type 22479 v.AddArg(y) 22480 return true 22481 } 22482 // match: (ORshiftLL [c] (SRLconst x [64-c]) x) 22483 // result: (RORconst [64-c] x) 22484 for { 22485 c := v.AuxInt 22486 x := v.Args[1] 22487 v_0 := v.Args[0] 22488 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 22489 break 22490 } 22491 v.reset(OpARM64RORconst) 22492 v.AuxInt = 64 - c 22493 v.AddArg(x) 22494 return true 22495 } 22496 // match: (ORshiftLL <t> [c] (UBFX [bfc] x) 
x) 22497 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 22498 // result: (RORWconst [32-c] x) 22499 for { 22500 t := v.Type 22501 c := v.AuxInt 22502 x := v.Args[1] 22503 v_0 := v.Args[0] 22504 if v_0.Op != OpARM64UBFX { 22505 break 22506 } 22507 bfc := v_0.AuxInt 22508 if x != v_0.Args[0] || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 22509 break 22510 } 22511 v.reset(OpARM64RORWconst) 22512 v.AuxInt = 32 - c 22513 v.AddArg(x) 22514 return true 22515 } 22516 // match: (ORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x) 22517 // result: (REV16W x) 22518 for { 22519 if v.Type != typ.UInt16 || v.AuxInt != 8 { 22520 break 22521 } 22522 x := v.Args[1] 22523 v_0 := v.Args[0] 22524 if v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || v_0.AuxInt != armBFAuxInt(8, 8) || x != v_0.Args[0] { 22525 break 22526 } 22527 v.reset(OpARM64REV16W) 22528 v.AddArg(x) 22529 return true 22530 } 22531 // match: (ORshiftLL [c] (SRLconst x [64-c]) x2) 22532 // result: (EXTRconst [64-c] x2 x) 22533 for { 22534 c := v.AuxInt 22535 x2 := v.Args[1] 22536 v_0 := v.Args[0] 22537 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c { 22538 break 22539 } 22540 x := v_0.Args[0] 22541 v.reset(OpARM64EXTRconst) 22542 v.AuxInt = 64 - c 22543 v.AddArg(x2) 22544 v.AddArg(x) 22545 return true 22546 } 22547 // match: (ORshiftLL <t> [c] (UBFX [bfc] x) x2) 22548 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 22549 // result: (EXTRWconst [32-c] x2 x) 22550 for { 22551 t := v.Type 22552 c := v.AuxInt 22553 x2 := v.Args[1] 22554 v_0 := v.Args[0] 22555 if v_0.Op != OpARM64UBFX { 22556 break 22557 } 22558 bfc := v_0.AuxInt 22559 x := v_0.Args[0] 22560 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 22561 break 22562 } 22563 v.reset(OpARM64EXTRWconst) 22564 v.AuxInt = 32 - c 22565 v.AddArg(x2) 22566 v.AddArg(x) 22567 return true 22568 } 22569 // match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y)) 22570 // cond: sc == 
getARM64BFwidth(bfc) 22571 // result: (BFXIL [bfc] y x) 22572 for { 22573 sc := v.AuxInt 22574 _ = v.Args[1] 22575 v_0 := v.Args[0] 22576 if v_0.Op != OpARM64UBFX { 22577 break 22578 } 22579 bfc := v_0.AuxInt 22580 x := v_0.Args[0] 22581 v_1 := v.Args[1] 22582 if v_1.Op != OpARM64SRLconst || v_1.AuxInt != sc { 22583 break 22584 } 22585 y := v_1.Args[0] 22586 if !(sc == getARM64BFwidth(bfc)) { 22587 break 22588 } 22589 v.reset(OpARM64BFXIL) 22590 v.AuxInt = bfc 22591 v.AddArg(y) 22592 v.AddArg(x) 22593 return true 22594 } 22595 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) 22596 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 22597 // result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 22598 for { 22599 t := v.Type 22600 if v.AuxInt != 8 { 22601 break 22602 } 22603 _ = v.Args[1] 22604 y0 := v.Args[0] 22605 if y0.Op != OpARM64MOVDnop { 22606 break 22607 } 22608 x0 := y0.Args[0] 22609 if x0.Op != OpARM64MOVBUload { 22610 break 22611 } 22612 i0 := x0.AuxInt 22613 s := x0.Aux 22614 mem := x0.Args[1] 22615 p := x0.Args[0] 22616 y1 := v.Args[1] 22617 if y1.Op != OpARM64MOVDnop { 22618 break 22619 } 22620 x1 := y1.Args[0] 22621 if x1.Op != OpARM64MOVBUload { 22622 break 22623 } 22624 i1 := x1.AuxInt 22625 if x1.Aux != s { 22626 break 22627 } 22628 _ = x1.Args[1] 22629 if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 22630 break 22631 } 22632 b = mergePoint(b, x0, x1) 22633 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t) 22634 v.reset(OpCopy) 22635 v.AddArg(v0) 22636 v0.Aux = s 22637 v1 := b.NewValue0(x1.Pos, OpOffPtr, p.Type) 22638 v1.AuxInt = i0 22639 v1.AddArg(p) 22640 v0.AddArg(v1) 22641 
v0.AddArg(mem) 22642 return true 22643 } 22644 return false 22645 } 22646 func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool { 22647 b := v.Block 22648 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) 22649 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 22650 // result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr0 idx0 mem) 22651 for { 22652 t := v.Type 22653 if v.AuxInt != 8 { 22654 break 22655 } 22656 _ = v.Args[1] 22657 y0 := v.Args[0] 22658 if y0.Op != OpARM64MOVDnop { 22659 break 22660 } 22661 x0 := y0.Args[0] 22662 if x0.Op != OpARM64MOVBUloadidx { 22663 break 22664 } 22665 mem := x0.Args[2] 22666 ptr0 := x0.Args[0] 22667 idx0 := x0.Args[1] 22668 y1 := v.Args[1] 22669 if y1.Op != OpARM64MOVDnop { 22670 break 22671 } 22672 x1 := y1.Args[0] 22673 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 { 22674 break 22675 } 22676 s := x1.Aux 22677 _ = x1.Args[1] 22678 p1 := x1.Args[0] 22679 if p1.Op != OpARM64ADD { 22680 break 22681 } 22682 idx1 := p1.Args[1] 22683 ptr1 := p1.Args[0] 22684 if mem != x1.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 22685 break 22686 } 22687 b = mergePoint(b, x0, x1) 22688 v0 := b.NewValue0(x1.Pos, OpARM64MOVHUloadidx, t) 22689 v.reset(OpCopy) 22690 v.AddArg(v0) 22691 v0.AddArg(ptr0) 22692 v0.AddArg(idx0) 22693 v0.AddArg(mem) 22694 return true 22695 } 22696 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) 22697 // 
cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 22698 // result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr idx mem) 22699 for { 22700 t := v.Type 22701 if v.AuxInt != 8 { 22702 break 22703 } 22704 _ = v.Args[1] 22705 y0 := v.Args[0] 22706 if y0.Op != OpARM64MOVDnop { 22707 break 22708 } 22709 x0 := y0.Args[0] 22710 if x0.Op != OpARM64MOVBUloadidx { 22711 break 22712 } 22713 mem := x0.Args[2] 22714 ptr := x0.Args[0] 22715 idx := x0.Args[1] 22716 y1 := v.Args[1] 22717 if y1.Op != OpARM64MOVDnop { 22718 break 22719 } 22720 x1 := y1.Args[0] 22721 if x1.Op != OpARM64MOVBUloadidx { 22722 break 22723 } 22724 _ = x1.Args[2] 22725 if ptr != x1.Args[0] { 22726 break 22727 } 22728 x1_1 := x1.Args[1] 22729 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 22730 break 22731 } 22732 b = mergePoint(b, x0, x1) 22733 v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 22734 v.reset(OpCopy) 22735 v.AddArg(v0) 22736 v0.AddArg(ptr) 22737 v0.AddArg(idx) 22738 v0.AddArg(mem) 22739 return true 22740 } 22741 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 22742 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 22743 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 22744 for { 22745 t := v.Type 22746 if v.AuxInt != 24 { 22747 break 22748 } 22749 _ = v.Args[1] 22750 o0 := v.Args[0] 22751 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 
{ 22752 break 22753 } 22754 _ = o0.Args[1] 22755 x0 := o0.Args[0] 22756 if x0.Op != OpARM64MOVHUload { 22757 break 22758 } 22759 i0 := x0.AuxInt 22760 s := x0.Aux 22761 mem := x0.Args[1] 22762 p := x0.Args[0] 22763 y1 := o0.Args[1] 22764 if y1.Op != OpARM64MOVDnop { 22765 break 22766 } 22767 x1 := y1.Args[0] 22768 if x1.Op != OpARM64MOVBUload { 22769 break 22770 } 22771 i2 := x1.AuxInt 22772 if x1.Aux != s { 22773 break 22774 } 22775 _ = x1.Args[1] 22776 if p != x1.Args[0] || mem != x1.Args[1] { 22777 break 22778 } 22779 y2 := v.Args[1] 22780 if y2.Op != OpARM64MOVDnop { 22781 break 22782 } 22783 x2 := y2.Args[0] 22784 if x2.Op != OpARM64MOVBUload { 22785 break 22786 } 22787 i3 := x2.AuxInt 22788 if x2.Aux != s { 22789 break 22790 } 22791 _ = x2.Args[1] 22792 if p != x2.Args[0] || mem != x2.Args[1] || !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 22793 break 22794 } 22795 b = mergePoint(b, x0, x1, x2) 22796 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 22797 v.reset(OpCopy) 22798 v.AddArg(v0) 22799 v0.Aux = s 22800 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 22801 v1.AuxInt = i0 22802 v1.AddArg(p) 22803 v0.AddArg(v1) 22804 v0.AddArg(mem) 22805 return true 22806 } 22807 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 22808 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 22809 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx 
<t> ptr0 idx0 mem) 22810 for { 22811 t := v.Type 22812 if v.AuxInt != 24 { 22813 break 22814 } 22815 _ = v.Args[1] 22816 o0 := v.Args[0] 22817 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 22818 break 22819 } 22820 _ = o0.Args[1] 22821 x0 := o0.Args[0] 22822 if x0.Op != OpARM64MOVHUloadidx { 22823 break 22824 } 22825 mem := x0.Args[2] 22826 ptr0 := x0.Args[0] 22827 idx0 := x0.Args[1] 22828 y1 := o0.Args[1] 22829 if y1.Op != OpARM64MOVDnop { 22830 break 22831 } 22832 x1 := y1.Args[0] 22833 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 2 { 22834 break 22835 } 22836 s := x1.Aux 22837 _ = x1.Args[1] 22838 p1 := x1.Args[0] 22839 if p1.Op != OpARM64ADD { 22840 break 22841 } 22842 idx1 := p1.Args[1] 22843 ptr1 := p1.Args[0] 22844 if mem != x1.Args[1] { 22845 break 22846 } 22847 y2 := v.Args[1] 22848 if y2.Op != OpARM64MOVDnop { 22849 break 22850 } 22851 x2 := y2.Args[0] 22852 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 3 || x2.Aux != s { 22853 break 22854 } 22855 _ = x2.Args[1] 22856 p := x2.Args[0] 22857 if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 22858 break 22859 } 22860 b = mergePoint(b, x0, x1, x2) 22861 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 22862 v.reset(OpCopy) 22863 v.AddArg(v0) 22864 v0.AddArg(ptr0) 22865 v0.AddArg(idx0) 22866 v0.AddArg(mem) 22867 return true 22868 } 22869 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 22870 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && 
clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 22871 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr idx mem) 22872 for { 22873 t := v.Type 22874 if v.AuxInt != 24 { 22875 break 22876 } 22877 _ = v.Args[1] 22878 o0 := v.Args[0] 22879 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 22880 break 22881 } 22882 _ = o0.Args[1] 22883 x0 := o0.Args[0] 22884 if x0.Op != OpARM64MOVHUloadidx { 22885 break 22886 } 22887 mem := x0.Args[2] 22888 ptr := x0.Args[0] 22889 idx := x0.Args[1] 22890 y1 := o0.Args[1] 22891 if y1.Op != OpARM64MOVDnop { 22892 break 22893 } 22894 x1 := y1.Args[0] 22895 if x1.Op != OpARM64MOVBUloadidx { 22896 break 22897 } 22898 _ = x1.Args[2] 22899 if ptr != x1.Args[0] { 22900 break 22901 } 22902 x1_1 := x1.Args[1] 22903 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 2 || idx != x1_1.Args[0] || mem != x1.Args[2] { 22904 break 22905 } 22906 y2 := v.Args[1] 22907 if y2.Op != OpARM64MOVDnop { 22908 break 22909 } 22910 x2 := y2.Args[0] 22911 if x2.Op != OpARM64MOVBUloadidx { 22912 break 22913 } 22914 _ = x2.Args[2] 22915 if ptr != x2.Args[0] { 22916 break 22917 } 22918 x2_1 := x2.Args[1] 22919 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 3 || idx != x2_1.Args[0] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 22920 break 22921 } 22922 b = mergePoint(b, x0, x1, x2) 22923 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22924 v.reset(OpCopy) 22925 v.AddArg(v0) 22926 v0.AddArg(ptr) 22927 v0.AddArg(idx) 22928 v0.AddArg(mem) 22929 return true 22930 } 22931 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx2 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADDshiftLL [1] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 22932 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses 
== 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 22933 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 (SLLconst <idx0.Type> [1] idx0) mem) 22934 for { 22935 t := v.Type 22936 if v.AuxInt != 24 { 22937 break 22938 } 22939 _ = v.Args[1] 22940 o0 := v.Args[0] 22941 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 22942 break 22943 } 22944 _ = o0.Args[1] 22945 x0 := o0.Args[0] 22946 if x0.Op != OpARM64MOVHUloadidx2 { 22947 break 22948 } 22949 mem := x0.Args[2] 22950 ptr0 := x0.Args[0] 22951 idx0 := x0.Args[1] 22952 y1 := o0.Args[1] 22953 if y1.Op != OpARM64MOVDnop { 22954 break 22955 } 22956 x1 := y1.Args[0] 22957 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 2 { 22958 break 22959 } 22960 s := x1.Aux 22961 _ = x1.Args[1] 22962 p1 := x1.Args[0] 22963 if p1.Op != OpARM64ADDshiftLL || p1.AuxInt != 1 { 22964 break 22965 } 22966 idx1 := p1.Args[1] 22967 ptr1 := p1.Args[0] 22968 if mem != x1.Args[1] { 22969 break 22970 } 22971 y2 := v.Args[1] 22972 if y2.Op != OpARM64MOVDnop { 22973 break 22974 } 22975 x2 := y2.Args[0] 22976 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 3 || x2.Aux != s { 22977 break 22978 } 22979 _ = x2.Args[1] 22980 p := x2.Args[0] 22981 if mem != x2.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 22982 break 22983 } 22984 b = mergePoint(b, x0, x1, x2) 22985 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 22986 v.reset(OpCopy) 22987 v.AddArg(v0) 22988 v0.AddArg(ptr0) 22989 v1 := b.NewValue0(x2.Pos, OpARM64SLLconst, idx0.Type) 22990 v1.AuxInt = 1 22991 v1.AddArg(idx0) 22992 v0.AddArg(v1) 
22993 v0.AddArg(mem) 22994 return true 22995 } 22996 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 22997 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 22998 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 22999 for { 23000 t := v.Type 23001 if v.AuxInt != 56 { 23002 break 23003 } 23004 _ = v.Args[1] 23005 o0 := v.Args[0] 23006 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23007 break 23008 } 23009 _ = o0.Args[1] 23010 o1 := o0.Args[0] 23011 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23012 break 23013 } 23014 _ = o1.Args[1] 23015 o2 := o1.Args[0] 23016 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23017 break 23018 } 23019 _ = o2.Args[1] 23020 x0 := o2.Args[0] 23021 if x0.Op != OpARM64MOVWUload { 23022 break 23023 } 23024 i0 := x0.AuxInt 23025 s := x0.Aux 23026 mem := x0.Args[1] 23027 p := x0.Args[0] 23028 y1 := o2.Args[1] 23029 if y1.Op != OpARM64MOVDnop { 23030 break 23031 } 23032 x1 := y1.Args[0] 23033 if x1.Op != OpARM64MOVBUload { 23034 break 23035 } 23036 i4 := x1.AuxInt 23037 if x1.Aux != s { 23038 break 23039 } 23040 _ = x1.Args[1] 23041 if p != x1.Args[0] || mem != x1.Args[1] { 23042 break 23043 } 23044 y2 := o1.Args[1] 23045 if y2.Op != OpARM64MOVDnop { 23046 break 23047 } 23048 x2 := y2.Args[0] 23049 if x2.Op != OpARM64MOVBUload { 23050 
break 23051 } 23052 i5 := x2.AuxInt 23053 if x2.Aux != s { 23054 break 23055 } 23056 _ = x2.Args[1] 23057 if p != x2.Args[0] || mem != x2.Args[1] { 23058 break 23059 } 23060 y3 := o0.Args[1] 23061 if y3.Op != OpARM64MOVDnop { 23062 break 23063 } 23064 x3 := y3.Args[0] 23065 if x3.Op != OpARM64MOVBUload { 23066 break 23067 } 23068 i6 := x3.AuxInt 23069 if x3.Aux != s { 23070 break 23071 } 23072 _ = x3.Args[1] 23073 if p != x3.Args[0] || mem != x3.Args[1] { 23074 break 23075 } 23076 y4 := v.Args[1] 23077 if y4.Op != OpARM64MOVDnop { 23078 break 23079 } 23080 x4 := y4.Args[0] 23081 if x4.Op != OpARM64MOVBUload { 23082 break 23083 } 23084 i7 := x4.AuxInt 23085 if x4.Aux != s { 23086 break 23087 } 23088 _ = x4.Args[1] 23089 if p != x4.Args[0] || mem != x4.Args[1] || !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23090 break 23091 } 23092 b = mergePoint(b, x0, x1, x2, x3, x4) 23093 v0 := b.NewValue0(x4.Pos, OpARM64MOVDload, t) 23094 v.reset(OpCopy) 23095 v.AddArg(v0) 23096 v0.Aux = s 23097 v1 := b.NewValue0(x4.Pos, OpOffPtr, p.Type) 23098 v1.AuxInt = i0 23099 v1.AddArg(p) 23100 v0.AddArg(v1) 23101 v0.AddArg(mem) 23102 return true 23103 } 23104 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 23105 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && 
y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23106 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 idx0 mem) 23107 for { 23108 t := v.Type 23109 if v.AuxInt != 56 { 23110 break 23111 } 23112 _ = v.Args[1] 23113 o0 := v.Args[0] 23114 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23115 break 23116 } 23117 _ = o0.Args[1] 23118 o1 := o0.Args[0] 23119 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23120 break 23121 } 23122 _ = o1.Args[1] 23123 o2 := o1.Args[0] 23124 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23125 break 23126 } 23127 _ = o2.Args[1] 23128 x0 := o2.Args[0] 23129 if x0.Op != OpARM64MOVWUloadidx { 23130 break 23131 } 23132 mem := x0.Args[2] 23133 ptr0 := x0.Args[0] 23134 idx0 := x0.Args[1] 23135 y1 := o2.Args[1] 23136 if y1.Op != OpARM64MOVDnop { 23137 break 23138 } 23139 x1 := y1.Args[0] 23140 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 4 { 23141 break 23142 } 23143 s := x1.Aux 23144 _ = x1.Args[1] 23145 p1 := x1.Args[0] 23146 if p1.Op != OpARM64ADD { 23147 break 23148 } 23149 idx1 := p1.Args[1] 23150 ptr1 := p1.Args[0] 23151 if mem != x1.Args[1] { 23152 break 23153 } 23154 y2 := o1.Args[1] 23155 if y2.Op != OpARM64MOVDnop { 23156 break 23157 } 23158 x2 := y2.Args[0] 23159 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 5 || x2.Aux != s { 23160 break 23161 } 23162 _ = x2.Args[1] 23163 p := x2.Args[0] 23164 if mem != x2.Args[1] { 23165 break 23166 } 23167 y3 := o0.Args[1] 23168 if y3.Op != OpARM64MOVDnop { 23169 break 23170 } 23171 x3 := y3.Args[0] 23172 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 6 || x3.Aux != s { 23173 break 
23174 } 23175 _ = x3.Args[1] 23176 if p != x3.Args[0] || mem != x3.Args[1] { 23177 break 23178 } 23179 y4 := v.Args[1] 23180 if y4.Op != OpARM64MOVDnop { 23181 break 23182 } 23183 x4 := y4.Args[0] 23184 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 7 || x4.Aux != s { 23185 break 23186 } 23187 _ = x4.Args[1] 23188 if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23189 break 23190 } 23191 b = mergePoint(b, x0, x1, x2, x3, x4) 23192 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t) 23193 v.reset(OpCopy) 23194 v.AddArg(v0) 23195 v0.AddArg(ptr0) 23196 v0.AddArg(idx0) 23197 v0.AddArg(mem) 23198 return true 23199 } 23200 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx4 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADDshiftLL [2] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 23201 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23202 // 
result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 (SLLconst <idx0.Type> [2] idx0) mem) 23203 for { 23204 t := v.Type 23205 if v.AuxInt != 56 { 23206 break 23207 } 23208 _ = v.Args[1] 23209 o0 := v.Args[0] 23210 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23211 break 23212 } 23213 _ = o0.Args[1] 23214 o1 := o0.Args[0] 23215 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23216 break 23217 } 23218 _ = o1.Args[1] 23219 o2 := o1.Args[0] 23220 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23221 break 23222 } 23223 _ = o2.Args[1] 23224 x0 := o2.Args[0] 23225 if x0.Op != OpARM64MOVWUloadidx4 { 23226 break 23227 } 23228 mem := x0.Args[2] 23229 ptr0 := x0.Args[0] 23230 idx0 := x0.Args[1] 23231 y1 := o2.Args[1] 23232 if y1.Op != OpARM64MOVDnop { 23233 break 23234 } 23235 x1 := y1.Args[0] 23236 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 4 { 23237 break 23238 } 23239 s := x1.Aux 23240 _ = x1.Args[1] 23241 p1 := x1.Args[0] 23242 if p1.Op != OpARM64ADDshiftLL || p1.AuxInt != 2 { 23243 break 23244 } 23245 idx1 := p1.Args[1] 23246 ptr1 := p1.Args[0] 23247 if mem != x1.Args[1] { 23248 break 23249 } 23250 y2 := o1.Args[1] 23251 if y2.Op != OpARM64MOVDnop { 23252 break 23253 } 23254 x2 := y2.Args[0] 23255 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 5 || x2.Aux != s { 23256 break 23257 } 23258 _ = x2.Args[1] 23259 p := x2.Args[0] 23260 if mem != x2.Args[1] { 23261 break 23262 } 23263 y3 := o0.Args[1] 23264 if y3.Op != OpARM64MOVDnop { 23265 break 23266 } 23267 x3 := y3.Args[0] 23268 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 6 || x3.Aux != s { 23269 break 23270 } 23271 _ = x3.Args[1] 23272 if p != x3.Args[0] || mem != x3.Args[1] { 23273 break 23274 } 23275 y4 := v.Args[1] 23276 if y4.Op != OpARM64MOVDnop { 23277 break 23278 } 23279 x4 := y4.Args[0] 23280 if x4.Op != OpARM64MOVBUload || x4.AuxInt != 7 || x4.Aux != s { 23281 break 23282 } 23283 _ = x4.Args[1] 23284 if p != x4.Args[0] || mem != x4.Args[1] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses 
== 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23285 break 23286 } 23287 b = mergePoint(b, x0, x1, x2, x3, x4) 23288 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t) 23289 v.reset(OpCopy) 23290 v.AddArg(v0) 23291 v0.AddArg(ptr0) 23292 v1 := b.NewValue0(x4.Pos, OpARM64SLLconst, idx0.Type) 23293 v1.AuxInt = 2 23294 v1.AddArg(idx0) 23295 v0.AddArg(v1) 23296 v0.AddArg(mem) 23297 return true 23298 } 23299 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 23300 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23301 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr idx mem) 23302 for { 23303 t := v.Type 23304 if v.AuxInt != 56 { 23305 break 23306 } 23307 _ = v.Args[1] 23308 o0 := v.Args[0] 23309 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23310 break 23311 } 23312 _ = o0.Args[1] 23313 o1 := o0.Args[0] 23314 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23315 break 23316 } 23317 _ = o1.Args[1] 23318 o2 
:= o1.Args[0] 23319 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23320 break 23321 } 23322 _ = o2.Args[1] 23323 x0 := o2.Args[0] 23324 if x0.Op != OpARM64MOVWUloadidx { 23325 break 23326 } 23327 mem := x0.Args[2] 23328 ptr := x0.Args[0] 23329 idx := x0.Args[1] 23330 y1 := o2.Args[1] 23331 if y1.Op != OpARM64MOVDnop { 23332 break 23333 } 23334 x1 := y1.Args[0] 23335 if x1.Op != OpARM64MOVBUloadidx { 23336 break 23337 } 23338 _ = x1.Args[2] 23339 if ptr != x1.Args[0] { 23340 break 23341 } 23342 x1_1 := x1.Args[1] 23343 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 4 || idx != x1_1.Args[0] || mem != x1.Args[2] { 23344 break 23345 } 23346 y2 := o1.Args[1] 23347 if y2.Op != OpARM64MOVDnop { 23348 break 23349 } 23350 x2 := y2.Args[0] 23351 if x2.Op != OpARM64MOVBUloadidx { 23352 break 23353 } 23354 _ = x2.Args[2] 23355 if ptr != x2.Args[0] { 23356 break 23357 } 23358 x2_1 := x2.Args[1] 23359 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 5 || idx != x2_1.Args[0] || mem != x2.Args[2] { 23360 break 23361 } 23362 y3 := o0.Args[1] 23363 if y3.Op != OpARM64MOVDnop { 23364 break 23365 } 23366 x3 := y3.Args[0] 23367 if x3.Op != OpARM64MOVBUloadidx { 23368 break 23369 } 23370 _ = x3.Args[2] 23371 if ptr != x3.Args[0] { 23372 break 23373 } 23374 x3_1 := x3.Args[1] 23375 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 6 || idx != x3_1.Args[0] || mem != x3.Args[2] { 23376 break 23377 } 23378 y4 := v.Args[1] 23379 if y4.Op != OpARM64MOVDnop { 23380 break 23381 } 23382 x4 := y4.Args[0] 23383 if x4.Op != OpARM64MOVBUloadidx { 23384 break 23385 } 23386 _ = x4.Args[2] 23387 if ptr != x4.Args[0] { 23388 break 23389 } 23390 x4_1 := x4.Args[1] 23391 if x4_1.Op != OpARM64ADDconst || x4_1.AuxInt != 7 || idx != x4_1.Args[0] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && 
clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23392 break 23393 } 23394 b = mergePoint(b, x0, x1, x2, x3, x4) 23395 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 23396 v.reset(OpCopy) 23397 v.AddArg(v0) 23398 v0.AddArg(ptr) 23399 v0.AddArg(idx) 23400 v0.AddArg(mem) 23401 return true 23402 } 23403 return false 23404 } 23405 func rewriteValueARM64_OpARM64ORshiftLL_20(v *Value) bool { 23406 b := v.Block 23407 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 23408 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 23409 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 23410 for { 23411 t := v.Type 23412 if v.AuxInt != 8 { 23413 break 23414 } 23415 _ = v.Args[1] 23416 y0 := v.Args[0] 23417 if y0.Op != OpARM64MOVDnop { 23418 break 23419 } 23420 x0 := y0.Args[0] 23421 if x0.Op != OpARM64MOVBUload { 23422 break 23423 } 23424 i1 := x0.AuxInt 23425 s := x0.Aux 23426 mem := x0.Args[1] 23427 p := x0.Args[0] 23428 y1 := v.Args[1] 23429 if y1.Op != OpARM64MOVDnop { 23430 break 23431 } 23432 x1 := y1.Args[0] 23433 if x1.Op != OpARM64MOVBUload { 23434 break 23435 } 23436 i0 := x1.AuxInt 23437 if x1.Aux != s { 23438 break 23439 } 23440 _ = x1.Args[1] 23441 if p != x1.Args[0] || mem != x1.Args[1] || !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 23442 break 23443 } 23444 b = mergePoint(b, x0, x1) 23445 v0 := b.NewValue0(x1.Pos, OpARM64REV16W, t) 23446 v.reset(OpCopy) 23447 v.AddArg(v0) 23448 v1 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t) 23449 v1.AuxInt = i0 23450 v1.Aux = s 23451 v1.AddArg(p) 23452 v1.AddArg(mem) 
23453 v0.AddArg(v1) 23454 return true 23455 } 23456 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr0 idx0 mem))) 23457 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 23458 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr0 idx0 mem)) 23459 for { 23460 t := v.Type 23461 if v.AuxInt != 8 { 23462 break 23463 } 23464 _ = v.Args[1] 23465 y0 := v.Args[0] 23466 if y0.Op != OpARM64MOVDnop { 23467 break 23468 } 23469 x0 := y0.Args[0] 23470 if x0.Op != OpARM64MOVBUload || x0.AuxInt != 1 { 23471 break 23472 } 23473 s := x0.Aux 23474 mem := x0.Args[1] 23475 p1 := x0.Args[0] 23476 if p1.Op != OpARM64ADD { 23477 break 23478 } 23479 idx1 := p1.Args[1] 23480 ptr1 := p1.Args[0] 23481 y1 := v.Args[1] 23482 if y1.Op != OpARM64MOVDnop { 23483 break 23484 } 23485 x1 := y1.Args[0] 23486 if x1.Op != OpARM64MOVBUloadidx { 23487 break 23488 } 23489 _ = x1.Args[2] 23490 ptr0 := x1.Args[0] 23491 idx0 := x1.Args[1] 23492 if mem != x1.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 23493 break 23494 } 23495 b = mergePoint(b, x0, x1) 23496 v0 := b.NewValue0(x0.Pos, OpARM64REV16W, t) 23497 v.reset(OpCopy) 23498 v.AddArg(v0) 23499 v1 := b.NewValue0(x0.Pos, OpARM64MOVHUloadidx, t) 23500 v1.AddArg(ptr0) 23501 v1.AddArg(idx0) 23502 v1.AddArg(mem) 23503 v0.AddArg(v1) 23504 return true 23505 } 23506 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [1] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr idx mem))) 23507 // cond: x0.Uses == 1 && 
x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 23508 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr idx mem)) 23509 for { 23510 t := v.Type 23511 if v.AuxInt != 8 { 23512 break 23513 } 23514 _ = v.Args[1] 23515 y0 := v.Args[0] 23516 if y0.Op != OpARM64MOVDnop { 23517 break 23518 } 23519 x0 := y0.Args[0] 23520 if x0.Op != OpARM64MOVBUloadidx { 23521 break 23522 } 23523 mem := x0.Args[2] 23524 ptr := x0.Args[0] 23525 x0_1 := x0.Args[1] 23526 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 1 { 23527 break 23528 } 23529 idx := x0_1.Args[0] 23530 y1 := v.Args[1] 23531 if y1.Op != OpARM64MOVDnop { 23532 break 23533 } 23534 x1 := y1.Args[0] 23535 if x1.Op != OpARM64MOVBUloadidx { 23536 break 23537 } 23538 _ = x1.Args[2] 23539 if ptr != x1.Args[0] || idx != x1.Args[1] || mem != x1.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 23540 break 23541 } 23542 b = mergePoint(b, x0, x1) 23543 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 23544 v.reset(OpCopy) 23545 v.AddArg(v0) 23546 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 23547 v1.AddArg(ptr) 23548 v1.AddArg(idx) 23549 v1.AddArg(mem) 23550 v0.AddArg(v1) 23551 return true 23552 } 23553 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 23554 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 23555 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 23556 for { 23557 t := v.Type 23558 if v.AuxInt != 24 { 
23559 break 23560 } 23561 _ = v.Args[1] 23562 o0 := v.Args[0] 23563 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 23564 break 23565 } 23566 _ = o0.Args[1] 23567 y0 := o0.Args[0] 23568 if y0.Op != OpARM64REV16W { 23569 break 23570 } 23571 x0 := y0.Args[0] 23572 if x0.Op != OpARM64MOVHUload { 23573 break 23574 } 23575 i2 := x0.AuxInt 23576 s := x0.Aux 23577 mem := x0.Args[1] 23578 p := x0.Args[0] 23579 y1 := o0.Args[1] 23580 if y1.Op != OpARM64MOVDnop { 23581 break 23582 } 23583 x1 := y1.Args[0] 23584 if x1.Op != OpARM64MOVBUload { 23585 break 23586 } 23587 i1 := x1.AuxInt 23588 if x1.Aux != s { 23589 break 23590 } 23591 _ = x1.Args[1] 23592 if p != x1.Args[0] || mem != x1.Args[1] { 23593 break 23594 } 23595 y2 := v.Args[1] 23596 if y2.Op != OpARM64MOVDnop { 23597 break 23598 } 23599 x2 := y2.Args[0] 23600 if x2.Op != OpARM64MOVBUload { 23601 break 23602 } 23603 i0 := x2.AuxInt 23604 if x2.Aux != s { 23605 break 23606 } 23607 _ = x2.Args[1] 23608 if p != x2.Args[0] || mem != x2.Args[1] || !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 23609 break 23610 } 23611 b = mergePoint(b, x0, x1, x2) 23612 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t) 23613 v.reset(OpCopy) 23614 v.AddArg(v0) 23615 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 23616 v1.Aux = s 23617 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 23618 v2.AuxInt = i0 23619 v2.AddArg(p) 23620 v1.AddArg(v2) 23621 v1.AddArg(mem) 23622 v0.AddArg(v1) 23623 return true 23624 } 23625 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr0 idx0 mem))) 23626 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses 
== 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 23627 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 23628 for { 23629 t := v.Type 23630 if v.AuxInt != 24 { 23631 break 23632 } 23633 _ = v.Args[1] 23634 o0 := v.Args[0] 23635 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 23636 break 23637 } 23638 _ = o0.Args[1] 23639 y0 := o0.Args[0] 23640 if y0.Op != OpARM64REV16W { 23641 break 23642 } 23643 x0 := y0.Args[0] 23644 if x0.Op != OpARM64MOVHUload || x0.AuxInt != 2 { 23645 break 23646 } 23647 s := x0.Aux 23648 mem := x0.Args[1] 23649 p := x0.Args[0] 23650 y1 := o0.Args[1] 23651 if y1.Op != OpARM64MOVDnop { 23652 break 23653 } 23654 x1 := y1.Args[0] 23655 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 1 || x1.Aux != s { 23656 break 23657 } 23658 _ = x1.Args[1] 23659 p1 := x1.Args[0] 23660 if p1.Op != OpARM64ADD { 23661 break 23662 } 23663 idx1 := p1.Args[1] 23664 ptr1 := p1.Args[0] 23665 if mem != x1.Args[1] { 23666 break 23667 } 23668 y2 := v.Args[1] 23669 if y2.Op != OpARM64MOVDnop { 23670 break 23671 } 23672 x2 := y2.Args[0] 23673 if x2.Op != OpARM64MOVBUloadidx { 23674 break 23675 } 23676 _ = x2.Args[2] 23677 ptr0 := x2.Args[0] 23678 idx0 := x2.Args[1] 23679 if mem != x2.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 23680 break 23681 } 23682 b = mergePoint(b, x0, x1, x2) 23683 v0 := b.NewValue0(x1.Pos, OpARM64REVW, t) 23684 v.reset(OpCopy) 23685 v.AddArg(v0) 
23686 v1 := b.NewValue0(x1.Pos, OpARM64MOVWUloadidx, t) 23687 v1.AddArg(ptr0) 23688 v1.AddArg(idx0) 23689 v1.AddArg(mem) 23690 v0.AddArg(v1) 23691 return true 23692 } 23693 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUloadidx ptr (ADDconst [2] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr idx mem))) 23694 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 23695 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 23696 for { 23697 t := v.Type 23698 if v.AuxInt != 24 { 23699 break 23700 } 23701 _ = v.Args[1] 23702 o0 := v.Args[0] 23703 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 16 { 23704 break 23705 } 23706 _ = o0.Args[1] 23707 y0 := o0.Args[0] 23708 if y0.Op != OpARM64REV16W { 23709 break 23710 } 23711 x0 := y0.Args[0] 23712 if x0.Op != OpARM64MOVHUloadidx { 23713 break 23714 } 23715 mem := x0.Args[2] 23716 ptr := x0.Args[0] 23717 x0_1 := x0.Args[1] 23718 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 2 { 23719 break 23720 } 23721 idx := x0_1.Args[0] 23722 y1 := o0.Args[1] 23723 if y1.Op != OpARM64MOVDnop { 23724 break 23725 } 23726 x1 := y1.Args[0] 23727 if x1.Op != OpARM64MOVBUloadidx { 23728 break 23729 } 23730 _ = x1.Args[2] 23731 if ptr != x1.Args[0] { 23732 break 23733 } 23734 x1_1 := x1.Args[1] 23735 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 1 || idx != x1_1.Args[0] || mem != x1.Args[2] { 23736 break 23737 } 23738 y2 := v.Args[1] 23739 if y2.Op != OpARM64MOVDnop { 23740 break 23741 } 23742 x2 := y2.Args[0] 23743 if x2.Op != OpARM64MOVBUloadidx { 23744 break 23745 } 23746 _ = x2.Args[2] 23747 if ptr != x2.Args[0] || idx != x2.Args[1] || mem != x2.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses 
== 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 23748 break 23749 } 23750 b = mergePoint(b, x0, x1, x2) 23751 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 23752 v.reset(OpCopy) 23753 v.AddArg(v0) 23754 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 23755 v1.AddArg(ptr) 23756 v1.AddArg(idx) 23757 v1.AddArg(mem) 23758 v0.AddArg(v1) 23759 return true 23760 } 23761 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 23762 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23763 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 23764 for { 23765 t := v.Type 23766 if v.AuxInt != 56 { 23767 break 23768 } 23769 _ = v.Args[1] 23770 o0 := v.Args[0] 23771 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23772 break 23773 } 23774 _ = o0.Args[1] 23775 o1 := o0.Args[0] 23776 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23777 break 23778 } 23779 _ = o1.Args[1] 23780 o2 := o1.Args[0] 23781 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23782 break 23783 } 23784 _ = o2.Args[1] 23785 y0 := o2.Args[0] 23786 if y0.Op != OpARM64REVW { 23787 break 23788 } 23789 x0 := y0.Args[0] 23790 if x0.Op != OpARM64MOVWUload { 23791 break 
23792 } 23793 i4 := x0.AuxInt 23794 s := x0.Aux 23795 mem := x0.Args[1] 23796 p := x0.Args[0] 23797 y1 := o2.Args[1] 23798 if y1.Op != OpARM64MOVDnop { 23799 break 23800 } 23801 x1 := y1.Args[0] 23802 if x1.Op != OpARM64MOVBUload { 23803 break 23804 } 23805 i3 := x1.AuxInt 23806 if x1.Aux != s { 23807 break 23808 } 23809 _ = x1.Args[1] 23810 if p != x1.Args[0] || mem != x1.Args[1] { 23811 break 23812 } 23813 y2 := o1.Args[1] 23814 if y2.Op != OpARM64MOVDnop { 23815 break 23816 } 23817 x2 := y2.Args[0] 23818 if x2.Op != OpARM64MOVBUload { 23819 break 23820 } 23821 i2 := x2.AuxInt 23822 if x2.Aux != s { 23823 break 23824 } 23825 _ = x2.Args[1] 23826 if p != x2.Args[0] || mem != x2.Args[1] { 23827 break 23828 } 23829 y3 := o0.Args[1] 23830 if y3.Op != OpARM64MOVDnop { 23831 break 23832 } 23833 x3 := y3.Args[0] 23834 if x3.Op != OpARM64MOVBUload { 23835 break 23836 } 23837 i1 := x3.AuxInt 23838 if x3.Aux != s { 23839 break 23840 } 23841 _ = x3.Args[1] 23842 if p != x3.Args[0] || mem != x3.Args[1] { 23843 break 23844 } 23845 y4 := v.Args[1] 23846 if y4.Op != OpARM64MOVDnop { 23847 break 23848 } 23849 x4 := y4.Args[0] 23850 if x4.Op != OpARM64MOVBUload { 23851 break 23852 } 23853 i0 := x4.AuxInt 23854 if x4.Aux != s { 23855 break 23856 } 23857 _ = x4.Args[1] 23858 if p != x4.Args[0] || mem != x4.Args[1] || !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23859 break 23860 } 23861 b = mergePoint(b, x0, x1, x2, x3, x4) 23862 v0 := b.NewValue0(x4.Pos, OpARM64REV, t) 23863 v.reset(OpCopy) 23864 v.AddArg(v0) 23865 v1 := 
b.NewValue0(x4.Pos, OpARM64MOVDload, t) 23866 v1.Aux = s 23867 v2 := b.NewValue0(x4.Pos, OpOffPtr, p.Type) 23868 v2.AuxInt = i0 23869 v2.AddArg(p) 23870 v1.AddArg(v2) 23871 v1.AddArg(mem) 23872 v0.AddArg(v1) 23873 return true 23874 } 23875 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr0 idx0 mem))) 23876 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23877 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 23878 for { 23879 t := v.Type 23880 if v.AuxInt != 56 { 23881 break 23882 } 23883 _ = v.Args[1] 23884 o0 := v.Args[0] 23885 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23886 break 23887 } 23888 _ = o0.Args[1] 23889 o1 := o0.Args[0] 23890 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23891 break 23892 } 23893 _ = o1.Args[1] 23894 o2 := o1.Args[0] 23895 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23896 break 23897 } 23898 _ = o2.Args[1] 23899 y0 := o2.Args[0] 23900 if y0.Op != OpARM64REVW { 23901 break 23902 } 23903 x0 := y0.Args[0] 23904 if x0.Op != OpARM64MOVWUload || x0.AuxInt != 4 { 23905 break 23906 } 23907 s := x0.Aux 23908 mem := x0.Args[1] 23909 p := x0.Args[0] 23910 y1 := o2.Args[1] 23911 if y1.Op != OpARM64MOVDnop { 23912 break 
23913 } 23914 x1 := y1.Args[0] 23915 if x1.Op != OpARM64MOVBUload || x1.AuxInt != 3 || x1.Aux != s { 23916 break 23917 } 23918 _ = x1.Args[1] 23919 if p != x1.Args[0] || mem != x1.Args[1] { 23920 break 23921 } 23922 y2 := o1.Args[1] 23923 if y2.Op != OpARM64MOVDnop { 23924 break 23925 } 23926 x2 := y2.Args[0] 23927 if x2.Op != OpARM64MOVBUload || x2.AuxInt != 2 || x2.Aux != s { 23928 break 23929 } 23930 _ = x2.Args[1] 23931 if p != x2.Args[0] || mem != x2.Args[1] { 23932 break 23933 } 23934 y3 := o0.Args[1] 23935 if y3.Op != OpARM64MOVDnop { 23936 break 23937 } 23938 x3 := y3.Args[0] 23939 if x3.Op != OpARM64MOVBUload || x3.AuxInt != 1 || x3.Aux != s { 23940 break 23941 } 23942 _ = x3.Args[1] 23943 p1 := x3.Args[0] 23944 if p1.Op != OpARM64ADD { 23945 break 23946 } 23947 idx1 := p1.Args[1] 23948 ptr1 := p1.Args[0] 23949 if mem != x3.Args[1] { 23950 break 23951 } 23952 y4 := v.Args[1] 23953 if y4.Op != OpARM64MOVDnop { 23954 break 23955 } 23956 x4 := y4.Args[0] 23957 if x4.Op != OpARM64MOVBUloadidx { 23958 break 23959 } 23960 _ = x4.Args[2] 23961 ptr0 := x4.Args[0] 23962 idx0 := x4.Args[1] 23963 if mem != x4.Args[2] || !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 23964 break 23965 } 23966 b = mergePoint(b, x0, x1, x2, x3, x4) 23967 v0 := b.NewValue0(x3.Pos, OpARM64REV, t) 23968 v.reset(OpCopy) 23969 v.AddArg(v0) 23970 v1 := b.NewValue0(x3.Pos, OpARM64MOVDloadidx, t) 23971 v1.AddArg(ptr0) 23972 v1.AddArg(idx0) 23973 v1.AddArg(mem) 23974 
v0.AddArg(v1) 23975 return true 23976 } 23977 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUloadidx ptr (ADDconst [4] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr idx mem))) 23978 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 23979 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr idx mem)) 23980 for { 23981 t := v.Type 23982 if v.AuxInt != 56 { 23983 break 23984 } 23985 _ = v.Args[1] 23986 o0 := v.Args[0] 23987 if o0.Op != OpARM64ORshiftLL || o0.AuxInt != 48 { 23988 break 23989 } 23990 _ = o0.Args[1] 23991 o1 := o0.Args[0] 23992 if o1.Op != OpARM64ORshiftLL || o1.AuxInt != 40 { 23993 break 23994 } 23995 _ = o1.Args[1] 23996 o2 := o1.Args[0] 23997 if o2.Op != OpARM64ORshiftLL || o2.AuxInt != 32 { 23998 break 23999 } 24000 _ = o2.Args[1] 24001 y0 := o2.Args[0] 24002 if y0.Op != OpARM64REVW { 24003 break 24004 } 24005 x0 := y0.Args[0] 24006 if x0.Op != OpARM64MOVWUloadidx { 24007 break 24008 } 24009 mem := x0.Args[2] 24010 ptr := x0.Args[0] 24011 x0_1 := x0.Args[1] 24012 if x0_1.Op != OpARM64ADDconst || x0_1.AuxInt != 4 { 24013 break 24014 } 24015 idx := x0_1.Args[0] 24016 y1 := o2.Args[1] 24017 if y1.Op != OpARM64MOVDnop { 24018 break 24019 } 24020 x1 := y1.Args[0] 24021 if x1.Op != OpARM64MOVBUloadidx { 24022 break 24023 } 24024 _ = x1.Args[2] 24025 if ptr != x1.Args[0] { 24026 break 24027 } 24028 x1_1 := x1.Args[1] 
24029 if x1_1.Op != OpARM64ADDconst || x1_1.AuxInt != 3 || idx != x1_1.Args[0] || mem != x1.Args[2] { 24030 break 24031 } 24032 y2 := o1.Args[1] 24033 if y2.Op != OpARM64MOVDnop { 24034 break 24035 } 24036 x2 := y2.Args[0] 24037 if x2.Op != OpARM64MOVBUloadidx { 24038 break 24039 } 24040 _ = x2.Args[2] 24041 if ptr != x2.Args[0] { 24042 break 24043 } 24044 x2_1 := x2.Args[1] 24045 if x2_1.Op != OpARM64ADDconst || x2_1.AuxInt != 2 || idx != x2_1.Args[0] || mem != x2.Args[2] { 24046 break 24047 } 24048 y3 := o0.Args[1] 24049 if y3.Op != OpARM64MOVDnop { 24050 break 24051 } 24052 x3 := y3.Args[0] 24053 if x3.Op != OpARM64MOVBUloadidx { 24054 break 24055 } 24056 _ = x3.Args[2] 24057 if ptr != x3.Args[0] { 24058 break 24059 } 24060 x3_1 := x3.Args[1] 24061 if x3_1.Op != OpARM64ADDconst || x3_1.AuxInt != 1 || idx != x3_1.Args[0] || mem != x3.Args[2] { 24062 break 24063 } 24064 y4 := v.Args[1] 24065 if y4.Op != OpARM64MOVDnop { 24066 break 24067 } 24068 x4 := y4.Args[0] 24069 if x4.Op != OpARM64MOVBUloadidx { 24070 break 24071 } 24072 _ = x4.Args[2] 24073 if ptr != x4.Args[0] || idx != x4.Args[1] || mem != x4.Args[2] || !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 24074 break 24075 } 24076 b = mergePoint(b, x0, x1, x2, x3, x4) 24077 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 24078 v.reset(OpCopy) 24079 v.AddArg(v0) 24080 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 24081 v1.AddArg(ptr) 24082 v1.AddArg(idx) 24083 v1.AddArg(mem) 24084 v0.AddArg(v1) 24085 return true 24086 } 24087 return false 24088 } 24089 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 24090 b := 
v.Block 24091 // match: (ORshiftRA (MOVDconst [c]) x [d]) 24092 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 24093 for { 24094 d := v.AuxInt 24095 x := v.Args[1] 24096 v_0 := v.Args[0] 24097 if v_0.Op != OpARM64MOVDconst { 24098 break 24099 } 24100 c := v_0.AuxInt 24101 v.reset(OpARM64ORconst) 24102 v.AuxInt = c 24103 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 24104 v0.AuxInt = d 24105 v0.AddArg(x) 24106 v.AddArg(v0) 24107 return true 24108 } 24109 // match: (ORshiftRA x (MOVDconst [c]) [d]) 24110 // result: (ORconst x [c>>uint64(d)]) 24111 for { 24112 d := v.AuxInt 24113 _ = v.Args[1] 24114 x := v.Args[0] 24115 v_1 := v.Args[1] 24116 if v_1.Op != OpARM64MOVDconst { 24117 break 24118 } 24119 c := v_1.AuxInt 24120 v.reset(OpARM64ORconst) 24121 v.AuxInt = c >> uint64(d) 24122 v.AddArg(x) 24123 return true 24124 } 24125 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 24126 // cond: c==d 24127 // result: y 24128 for { 24129 d := v.AuxInt 24130 _ = v.Args[1] 24131 x := v.Args[0] 24132 y := v.Args[1] 24133 if y.Op != OpARM64SRAconst { 24134 break 24135 } 24136 c := y.AuxInt 24137 if x != y.Args[0] || !(c == d) { 24138 break 24139 } 24140 v.reset(OpCopy) 24141 v.Type = y.Type 24142 v.AddArg(y) 24143 return true 24144 } 24145 return false 24146 } 24147 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 24148 b := v.Block 24149 // match: (ORshiftRL (MOVDconst [c]) x [d]) 24150 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 24151 for { 24152 d := v.AuxInt 24153 x := v.Args[1] 24154 v_0 := v.Args[0] 24155 if v_0.Op != OpARM64MOVDconst { 24156 break 24157 } 24158 c := v_0.AuxInt 24159 v.reset(OpARM64ORconst) 24160 v.AuxInt = c 24161 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 24162 v0.AuxInt = d 24163 v0.AddArg(x) 24164 v.AddArg(v0) 24165 return true 24166 } 24167 // match: (ORshiftRL x (MOVDconst [c]) [d]) 24168 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 24169 for { 24170 d := v.AuxInt 24171 _ = v.Args[1] 24172 x := v.Args[0] 24173 v_1 
:= v.Args[1] 24174 if v_1.Op != OpARM64MOVDconst { 24175 break 24176 } 24177 c := v_1.AuxInt 24178 v.reset(OpARM64ORconst) 24179 v.AuxInt = int64(uint64(c) >> uint64(d)) 24180 v.AddArg(x) 24181 return true 24182 } 24183 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 24184 // cond: c==d 24185 // result: y 24186 for { 24187 d := v.AuxInt 24188 _ = v.Args[1] 24189 x := v.Args[0] 24190 y := v.Args[1] 24191 if y.Op != OpARM64SRLconst { 24192 break 24193 } 24194 c := y.AuxInt 24195 if x != y.Args[0] || !(c == d) { 24196 break 24197 } 24198 v.reset(OpCopy) 24199 v.Type = y.Type 24200 v.AddArg(y) 24201 return true 24202 } 24203 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 24204 // result: (RORconst [ c] x) 24205 for { 24206 c := v.AuxInt 24207 x := v.Args[1] 24208 v_0 := v.Args[0] 24209 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 24210 break 24211 } 24212 v.reset(OpARM64RORconst) 24213 v.AuxInt = c 24214 v.AddArg(x) 24215 return true 24216 } 24217 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 24218 // cond: c < 32 && t.Size() == 4 24219 // result: (RORWconst [c] x) 24220 for { 24221 t := v.Type 24222 c := v.AuxInt 24223 _ = v.Args[1] 24224 v_0 := v.Args[0] 24225 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 32-c { 24226 break 24227 } 24228 x := v_0.Args[0] 24229 v_1 := v.Args[1] 24230 if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) { 24231 break 24232 } 24233 v.reset(OpARM64RORWconst) 24234 v.AuxInt = c 24235 v.AddArg(x) 24236 return true 24237 } 24238 // match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y)) 24239 // cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc)) 24240 // result: (BFI [armBFAuxInt(lc-rc, 64-lc)] x y) 24241 for { 24242 rc := v.AuxInt 24243 _ = v.Args[1] 24244 v_0 := v.Args[0] 24245 if v_0.Op != OpARM64ANDconst { 24246 break 24247 } 24248 ac := v_0.AuxInt 24249 x := v_0.Args[0] 24250 v_1 := v.Args[1] 24251 if v_1.Op != OpARM64SLLconst { 24252 break 24253 } 
// NOTE(review): generated from gen/ARM64.rules; comments added for review only —
// regenerate with `cd gen; go run *.go` rather than hand-editing.

// rewriteValueARM64_OpARM64RORWconst_0 folds two stacked 32-bit rotate-right
// constants into one; the rotate amounts add modulo 32.
func rewriteValueARM64_OpARM64RORWconst_0(v *Value) bool {
	// match: (RORWconst [c] (RORWconst [d] x))
	// result: (RORWconst [(c+d)&31] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64RORWconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64RORWconst)
		v.AuxInt = (c + d) & 31
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64RORconst_0 folds two stacked 64-bit rotate-right
// constants into one; the rotate amounts add modulo 64.
func rewriteValueARM64_OpARM64RORconst_0(v *Value) bool {
	// match: (RORconst [c] (RORconst [d] x))
	// result: (RORconst [(c+d)&63] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64RORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64RORconst)
		v.AuxInt = (c + d) & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SBCSflags_0 simplifies the borrow input of a
// subtract-with-carry: a borrow materialized as (NEG (NGCzerocarry bo)) is
// replaced by bo directly, and a constant-zero borrow turns the op into a
// plain SUBSflags.
func rewriteValueARM64_OpARM64SBCSflags_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> bo)))))
	// result: (SBCSflags x y bo)
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64NEG || v_2_0_0.Type != typ.UInt64 {
			break
		}
		v_2_0_0_0 := v_2_0_0.Args[0]
		if v_2_0_0_0.Op != OpARM64NGCzerocarry || v_2_0_0_0.Type != typ.UInt64 {
			break
		}
		bo := v_2_0_0_0.Args[0]
		v.reset(OpARM64SBCSflags)
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(bo)
		return true
	}
	// match: (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags (MOVDconst [0]))))
	// result: (SUBSflags x y)
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpSelect1 || v_2.Type != types.TypeFlags {
			break
		}
		v_2_0 := v_2.Args[0]
		if v_2_0.Op != OpARM64NEGSflags {
			break
		}
		v_2_0_0 := v_2_0.Args[0]
		if v_2_0_0.Op != OpARM64MOVDconst || v_2_0_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64SUBSflags)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLL_0 strength-reduces a variable left shift by a
// constant amount to SLLconst (shift count taken mod 64).
func rewriteValueARM64_OpARM64SLL_0(v *Value) bool {
	// match: (SLL x (MOVDconst [c]))
	// result: (SLLconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SLLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLLconst_0 constant-folds left shifts and converts
// shifts of masked/zero-extended values into UBFIZ bitfield-insert forms.
func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool {
	// match: (SLLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d << uint64(c)
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst || v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^(1<<uint(c) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFIZ [armBFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, 0))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVWUreg x))
	// cond: isARM64BFMask(sc, 1<<32-1, 0)
	// result: (UBFIZ [armBFAuxInt(sc, 32)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<32-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, 32)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVHUreg x))
	// cond: isARM64BFMask(sc, 1<<16-1, 0)
	// result: (UBFIZ [armBFAuxInt(sc, 16)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, 16)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVBUreg x))
	// cond: isARM64BFMask(sc, 1<<8-1, 0)
	// result: (UBFIZ [armBFAuxInt(sc, 8)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(sc, 8)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (UBFIZ [bfc] x))
	// cond: sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64
	// result: (UBFIZ [armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SRA_0 strength-reduces a variable arithmetic right
// shift by a constant amount to SRAconst (shift count taken mod 64).
func rewriteValueARM64_OpARM64SRA_0(v *Value) bool {
	// match: (SRA x (MOVDconst [c]))
	// result: (SRAconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRAconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRAconst_0 constant-folds arithmetic right shifts
// and converts shift-of-shift / shift-of-sign-extension patterns into
// SBFIZ/SBFX signed bitfield operations.
func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool {
	// match: (SRAconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d >> uint64(c)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (SBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = armBFAuxInt(lc-rc, 64-lc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc <= rc
	// result: (SBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = armBFAuxInt(rc-lc, 64-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVWreg x))
	// cond: rc < 32
	// result: (SBFX [armBFAuxInt(rc, 32-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = armBFAuxInt(rc, 32-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVHreg x))
	// cond: rc < 16
	// result: (SBFX [armBFAuxInt(rc, 16-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = armBFAuxInt(rc, 16-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVBreg x))
	// cond: rc < 8
	// result: (SBFX [armBFAuxInt(rc, 8-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = armBFAuxInt(rc, 8-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc < getARM64BFlsb(bfc)
	// result: (SBFIZ [armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)
	// result: (SBFX [armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SRL_0 strength-reduces a variable logical right
// shift by a constant amount to SRLconst (shift count taken mod 64).
func rewriteValueARM64_OpARM64SRL_0(v *Value) bool {
	// match: (SRL x (MOVDconst [c]))
	// result: (SRLconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRLconst_0 constant-folds logical right shifts and
// converts shift-of-masked/zero-extended values into UBFX/UBFIZ unsigned
// bitfield operations (rules 1-10; more rules continue in _10).
func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool {
	// match: (SRLconst [c] (MOVDconst [d]))
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	// match: (SRLconst [c] (SLLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [1<<uint(64-c)-1] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst || v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = 1<<uint(64-c) - 1
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (UBFIZ [armBFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(lc-rc, 64-lc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(ac, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (MOVWUreg x))
	// cond: isARM64BFMask(sc, 1<<32-1, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<32-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<32-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (MOVHUreg x))
	// cond: isARM64BFMask(sc, 1<<16-1, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<16-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (MOVBUreg x))
	// cond: isARM64BFMask(sc, 1<<8-1, sc)
	// result: (UBFX [armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc < rc
	// result: (UBFX [armBFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < rc) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(rc-lc, 64-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFX [bfc] x))
	// cond: sc < getARM64BFwidth(bfc)
	// result: (UBFX [armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc == getARM64BFlsb(bfc)
	// result: (ANDconst [1<<uint(getARM64BFwidth(bfc))-1] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc == getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1
		v.AddArg(x)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SRLconst_10 continues the SRLconst rules from _0:
// more SRLconst-of-UBFIZ foldings into UBFIZ/UBFX bitfield ops.
func rewriteValueARM64_OpARM64SRLconst_10(v *Value) bool {
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc < getARM64BFlsb(bfc)
	// result: (UBFIZ [armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)
	// result: (UBFX [armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64STP_0 folds constant offsets and symbol addresses
// into a store-pair, and turns a store-pair of two zero constants into
// MOVQstorezero.
func rewriteValueARM64_OpARM64STP_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+off2] {sym} ptr val1 val2 mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val1 := v.Args[1]
		val2 := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val1)
		v.AddArg(val2)
		v.AddArg(mem)
		return true
	}
	// match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		mem := v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val1 := v.Args[1]
		val2 := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val1)
		v.AddArg(val2)
		v.AddArg(mem)
		return true
	}
	// match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	// result: (MOVQstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		mem := v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 0 {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst || v_2.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SUB_0 simplifies 64-bit subtraction: folds
// constants, fuses multiply-subtract (MSUB/MSUBW/MADD/MADDW), cancels x-x,
// reassociates nested SUBs, and merges shifted operands into SUBshift ops
// (rules 1-10; one more rule continues in _10).
func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
	b := v.Block
	// match: (SUB x (MOVDconst [c]))
	// result: (SUBconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUB a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MUL {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEG {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MULW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEGW {
			break
		}
		y := l.Args[1]
		x := l.Args[0]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x x)
	// result: (MOVDconst [0])
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUB x (SUB y z))
	// result: (SUB (ADD <v.Type> x z) y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SUB {
			break
		}
		z := v_1.Args[1]
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
	// match: (SUB (SUB x y) z)
	// result: (SUB x (ADD <y.Type> y z))
	for {
		z := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (SUB x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SUB_10 continues the SUB rules from _0: merges an
// arithmetic-right-shifted second operand into SUBshiftRA.
func rewriteValueARM64_OpARM64SUB_10(v *Value) bool {
	// match: (SUB x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBconst_0 simplifies subtract-constant: drops a
// zero subtrahend, folds constants, and merges chained SUBconst/ADDconst
// into a single ADDconst.
func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool {
	// match: (SUBconst [0] x)
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (MOVDconst [d]))
	// result: (MOVDconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBconst [c] (SUBconst [d] x))
	// result: (ADDconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (ADDconst [d] x))
	// result: (ADDconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBshiftLL_0 simplifies subtract-of-left-shifted
// operand: folds a constant operand, and cancels x - (x<<c) when the shift
// amounts match (result is 0).
func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool {
	// match: (SUBshiftLL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBshiftRA_0 simplifies subtract-of-arithmetic-
// right-shifted operand: folds a constant operand, and cancels matching
// self-subtraction to 0.
func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool {
	// match: (SUBshiftRA x (MOVDconst [c]) [d])
	// result: (SUBconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64SUBshiftRL_0 simplifies subtract-of-logical-
// right-shifted operand: folds a constant operand, and cancels matching
// self-subtraction to 0.
func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool {
	// match: (SUBshiftRL x (MOVDconst [c]) [d])
	// result: (SUBconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] || !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TST_0 canonicalizes TST (flags-only AND):
// a constant operand on either side becomes TSTconst, and a shifted operand
// on either side is merged into the corresponding TSTshift form.
func rewriteValueARM64_OpARM64TST_0(v *Value) bool {
	// match: (TST x (MOVDconst [c]))
	// result: (TSTconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64TSTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (TST (MOVDconst [c]) x)
	// result: (TSTconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64TSTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (TST x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (TSTshiftLL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRL x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRA x0 y [c])
	for {
		x0 := v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
// NOTE(review): generated from gen/ARM64.rules; comments added for review only.

// rewriteValueARM64_OpARM64TSTW_0 canonicalizes 32-bit TST: a constant
// operand on either side becomes TSTWconst.
func rewriteValueARM64_OpARM64TSTW_0(v *Value) bool {
	// match: (TSTW x (MOVDconst [c]))
	// result: (TSTWconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64TSTWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (TSTW (MOVDconst [c]) x)
	// result: (TSTWconst [c] x)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64TSTWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TSTWconst_0 evaluates a 32-bit test of two
// constants to a known flags value (the 32-bit AND is zero, negative, or
// positive).
func rewriteValueARM64_OpARM64TSTWconst_0(v *Value) bool {
	// match: (TSTWconst (MOVDconst [x]) [y])
	// cond: int32(x&y)==0
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x&y) == 0) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (TSTWconst (MOVDconst [x]) [y])
	// cond: int32(x&y)<0
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x&y) < 0) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (TSTWconst (MOVDconst [x]) [y])
	// cond: int32(x&y)>0
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x&y) > 0) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TSTconst_0 evaluates a 64-bit test of two
// constants to a known flags value (the AND is zero, negative, or positive).
func rewriteValueARM64_OpARM64TSTconst_0(v *Value) bool {
	// match: (TSTconst (MOVDconst [x]) [y])
	// cond: int64(x&y)==0
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x&y) == 0) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (TSTconst (MOVDconst [x]) [y])
	// cond: int64(x&y)<0
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x&y) < 0) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (TSTconst (MOVDconst [x]) [y])
	// cond: int64(x&y)>0
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x&y) > 0) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TSTshiftLL_0 simplifies TST with a left-shifted
// operand: a constant first operand is hoisted out to TSTconst of an explicit
// shift, and a constant second operand is folded into the TSTconst immediate.
func rewriteValueARM64_OpARM64TSTshiftLL_0(v *Value) bool {
	b := v.Block
	// match: (TSTshiftLL (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64TSTconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (TSTshiftLL x (MOVDconst [c]) [d])
	// result: (TSTconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64TSTconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64TSTshiftRA_0 simplifies TSTshiftRA (test with an
// arithmetically-shifted second operand) when one side is a constant.
// Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64TSTshiftRA_0(v *Value) bool {
	b := v.Block
	// match: (TSTshiftRA (MOVDconst [c]) x [d])
	// result: (TSTconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		// Constant on the left: materialize the shift as an explicit
		// SRAconst and test it against the immediate.
		v.reset(OpARM64TSTconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (TSTshiftRA x (MOVDconst [c]) [d])
	// result: (TSTconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		// Constant on the right: fold the shift at compile time. c is a
		// signed int64, so Go's >> is an arithmetic shift, matching SRA.
		v.reset(OpARM64TSTconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64UBFIZ_0 merges a left shift into a UBFIZ
// (unsigned bitfield insert in zero) by adjusting the bitfield constant.
// Reports whether a rewrite fired.
func rewriteValueARM64_OpARM64UBFIZ_0(v *Value) bool {
	// match: (UBFIZ [bfc] (SLLconst [sc] x))
	// cond: sc < getARM64BFwidth(bfc)
	// result: (UBFIZ [armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x)
	for {
		bfc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		// Only fold while the shift amount stays inside the field width;
		// otherwise the shift would move bits out of the extracted field.
		if !(sc < getARM64BFwidth(bfc)) {
			break
		}
		// Absorb the shift: bump the lsb by sc and shrink the width by sc,
		// per the rule's result expression above.
		v.reset(OpARM64UBFIZ)
		v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	return false
}
1<<uint(getARM64BFwidth(bfc)) - 1 25869 v.AddArg(x) 25870 return true 25871 } 25872 // match: (UBFX [bfc] (SLLconst [sc] x)) 25873 // cond: sc < getARM64BFlsb(bfc) 25874 // result: (UBFX [armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x) 25875 for { 25876 bfc := v.AuxInt 25877 v_0 := v.Args[0] 25878 if v_0.Op != OpARM64SLLconst { 25879 break 25880 } 25881 sc := v_0.AuxInt 25882 x := v_0.Args[0] 25883 if !(sc < getARM64BFlsb(bfc)) { 25884 break 25885 } 25886 v.reset(OpARM64UBFX) 25887 v.AuxInt = armBFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc)) 25888 v.AddArg(x) 25889 return true 25890 } 25891 // match: (UBFX [bfc] (SLLconst [sc] x)) 25892 // cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc) 25893 // result: (UBFIZ [armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x) 25894 for { 25895 bfc := v.AuxInt 25896 v_0 := v.Args[0] 25897 if v_0.Op != OpARM64SLLconst { 25898 break 25899 } 25900 sc := v_0.AuxInt 25901 x := v_0.Args[0] 25902 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) { 25903 break 25904 } 25905 v.reset(OpARM64UBFIZ) 25906 v.AuxInt = armBFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc) 25907 v.AddArg(x) 25908 return true 25909 } 25910 return false 25911 } 25912 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 25913 // match: (UDIV x (MOVDconst [1])) 25914 // result: x 25915 for { 25916 _ = v.Args[1] 25917 x := v.Args[0] 25918 v_1 := v.Args[1] 25919 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 1 { 25920 break 25921 } 25922 v.reset(OpCopy) 25923 v.Type = x.Type 25924 v.AddArg(x) 25925 return true 25926 } 25927 // match: (UDIV x (MOVDconst [c])) 25928 // cond: isPowerOfTwo(c) 25929 // result: (SRLconst [log2(c)] x) 25930 for { 25931 _ = v.Args[1] 25932 x := v.Args[0] 25933 v_1 := v.Args[1] 25934 if v_1.Op != OpARM64MOVDconst { 25935 break 25936 } 25937 c := v_1.AuxInt 25938 if !(isPowerOfTwo(c)) { 25939 break 25940 } 25941 
v.reset(OpARM64SRLconst) 25942 v.AuxInt = log2(c) 25943 v.AddArg(x) 25944 return true 25945 } 25946 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 25947 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 25948 for { 25949 _ = v.Args[1] 25950 v_0 := v.Args[0] 25951 if v_0.Op != OpARM64MOVDconst { 25952 break 25953 } 25954 c := v_0.AuxInt 25955 v_1 := v.Args[1] 25956 if v_1.Op != OpARM64MOVDconst { 25957 break 25958 } 25959 d := v_1.AuxInt 25960 v.reset(OpARM64MOVDconst) 25961 v.AuxInt = int64(uint64(c) / uint64(d)) 25962 return true 25963 } 25964 return false 25965 } 25966 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 25967 // match: (UDIVW x (MOVDconst [c])) 25968 // cond: uint32(c)==1 25969 // result: x 25970 for { 25971 _ = v.Args[1] 25972 x := v.Args[0] 25973 v_1 := v.Args[1] 25974 if v_1.Op != OpARM64MOVDconst { 25975 break 25976 } 25977 c := v_1.AuxInt 25978 if !(uint32(c) == 1) { 25979 break 25980 } 25981 v.reset(OpCopy) 25982 v.Type = x.Type 25983 v.AddArg(x) 25984 return true 25985 } 25986 // match: (UDIVW x (MOVDconst [c])) 25987 // cond: isPowerOfTwo(c) && is32Bit(c) 25988 // result: (SRLconst [log2(c)] x) 25989 for { 25990 _ = v.Args[1] 25991 x := v.Args[0] 25992 v_1 := v.Args[1] 25993 if v_1.Op != OpARM64MOVDconst { 25994 break 25995 } 25996 c := v_1.AuxInt 25997 if !(isPowerOfTwo(c) && is32Bit(c)) { 25998 break 25999 } 26000 v.reset(OpARM64SRLconst) 26001 v.AuxInt = log2(c) 26002 v.AddArg(x) 26003 return true 26004 } 26005 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 26006 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 26007 for { 26008 _ = v.Args[1] 26009 v_0 := v.Args[0] 26010 if v_0.Op != OpARM64MOVDconst { 26011 break 26012 } 26013 c := v_0.AuxInt 26014 v_1 := v.Args[1] 26015 if v_1.Op != OpARM64MOVDconst { 26016 break 26017 } 26018 d := v_1.AuxInt 26019 v.reset(OpARM64MOVDconst) 26020 v.AuxInt = int64(uint32(c) / uint32(d)) 26021 return true 26022 } 26023 return false 26024 } 26025 func rewriteValueARM64_OpARM64UMOD_0(v 
*Value) bool { 26026 b := v.Block 26027 typ := &b.Func.Config.Types 26028 // match: (UMOD <typ.UInt64> x y) 26029 // result: (MSUB <typ.UInt64> x y (UDIV <typ.UInt64> x y)) 26030 for { 26031 if v.Type != typ.UInt64 { 26032 break 26033 } 26034 y := v.Args[1] 26035 x := v.Args[0] 26036 v.reset(OpARM64MSUB) 26037 v.Type = typ.UInt64 26038 v.AddArg(x) 26039 v.AddArg(y) 26040 v0 := b.NewValue0(v.Pos, OpARM64UDIV, typ.UInt64) 26041 v0.AddArg(x) 26042 v0.AddArg(y) 26043 v.AddArg(v0) 26044 return true 26045 } 26046 // match: (UMOD _ (MOVDconst [1])) 26047 // result: (MOVDconst [0]) 26048 for { 26049 _ = v.Args[1] 26050 v_1 := v.Args[1] 26051 if v_1.Op != OpARM64MOVDconst || v_1.AuxInt != 1 { 26052 break 26053 } 26054 v.reset(OpARM64MOVDconst) 26055 v.AuxInt = 0 26056 return true 26057 } 26058 // match: (UMOD x (MOVDconst [c])) 26059 // cond: isPowerOfTwo(c) 26060 // result: (ANDconst [c-1] x) 26061 for { 26062 _ = v.Args[1] 26063 x := v.Args[0] 26064 v_1 := v.Args[1] 26065 if v_1.Op != OpARM64MOVDconst { 26066 break 26067 } 26068 c := v_1.AuxInt 26069 if !(isPowerOfTwo(c)) { 26070 break 26071 } 26072 v.reset(OpARM64ANDconst) 26073 v.AuxInt = c - 1 26074 v.AddArg(x) 26075 return true 26076 } 26077 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 26078 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 26079 for { 26080 _ = v.Args[1] 26081 v_0 := v.Args[0] 26082 if v_0.Op != OpARM64MOVDconst { 26083 break 26084 } 26085 c := v_0.AuxInt 26086 v_1 := v.Args[1] 26087 if v_1.Op != OpARM64MOVDconst { 26088 break 26089 } 26090 d := v_1.AuxInt 26091 v.reset(OpARM64MOVDconst) 26092 v.AuxInt = int64(uint64(c) % uint64(d)) 26093 return true 26094 } 26095 return false 26096 } 26097 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 26098 b := v.Block 26099 typ := &b.Func.Config.Types 26100 // match: (UMODW <typ.UInt32> x y) 26101 // result: (MSUBW <typ.UInt32> x y (UDIVW <typ.UInt32> x y)) 26102 for { 26103 if v.Type != typ.UInt32 { 26104 break 26105 } 26106 y := v.Args[1] 26107 x 
:= v.Args[0] 26108 v.reset(OpARM64MSUBW) 26109 v.Type = typ.UInt32 26110 v.AddArg(x) 26111 v.AddArg(y) 26112 v0 := b.NewValue0(v.Pos, OpARM64UDIVW, typ.UInt32) 26113 v0.AddArg(x) 26114 v0.AddArg(y) 26115 v.AddArg(v0) 26116 return true 26117 } 26118 // match: (UMODW _ (MOVDconst [c])) 26119 // cond: uint32(c)==1 26120 // result: (MOVDconst [0]) 26121 for { 26122 _ = v.Args[1] 26123 v_1 := v.Args[1] 26124 if v_1.Op != OpARM64MOVDconst { 26125 break 26126 } 26127 c := v_1.AuxInt 26128 if !(uint32(c) == 1) { 26129 break 26130 } 26131 v.reset(OpARM64MOVDconst) 26132 v.AuxInt = 0 26133 return true 26134 } 26135 // match: (UMODW x (MOVDconst [c])) 26136 // cond: isPowerOfTwo(c) && is32Bit(c) 26137 // result: (ANDconst [c-1] x) 26138 for { 26139 _ = v.Args[1] 26140 x := v.Args[0] 26141 v_1 := v.Args[1] 26142 if v_1.Op != OpARM64MOVDconst { 26143 break 26144 } 26145 c := v_1.AuxInt 26146 if !(isPowerOfTwo(c) && is32Bit(c)) { 26147 break 26148 } 26149 v.reset(OpARM64ANDconst) 26150 v.AuxInt = c - 1 26151 v.AddArg(x) 26152 return true 26153 } 26154 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 26155 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 26156 for { 26157 _ = v.Args[1] 26158 v_0 := v.Args[0] 26159 if v_0.Op != OpARM64MOVDconst { 26160 break 26161 } 26162 c := v_0.AuxInt 26163 v_1 := v.Args[1] 26164 if v_1.Op != OpARM64MOVDconst { 26165 break 26166 } 26167 d := v_1.AuxInt 26168 v.reset(OpARM64MOVDconst) 26169 v.AuxInt = int64(uint32(c) % uint32(d)) 26170 return true 26171 } 26172 return false 26173 } 26174 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 26175 // match: (XOR x (MOVDconst [c])) 26176 // result: (XORconst [c] x) 26177 for { 26178 _ = v.Args[1] 26179 x := v.Args[0] 26180 v_1 := v.Args[1] 26181 if v_1.Op != OpARM64MOVDconst { 26182 break 26183 } 26184 c := v_1.AuxInt 26185 v.reset(OpARM64XORconst) 26186 v.AuxInt = c 26187 v.AddArg(x) 26188 return true 26189 } 26190 // match: (XOR (MOVDconst [c]) x) 26191 // result: (XORconst [c] x) 26192 for { 
26193 x := v.Args[1] 26194 v_0 := v.Args[0] 26195 if v_0.Op != OpARM64MOVDconst { 26196 break 26197 } 26198 c := v_0.AuxInt 26199 v.reset(OpARM64XORconst) 26200 v.AuxInt = c 26201 v.AddArg(x) 26202 return true 26203 } 26204 // match: (XOR x x) 26205 // result: (MOVDconst [0]) 26206 for { 26207 x := v.Args[1] 26208 if x != v.Args[0] { 26209 break 26210 } 26211 v.reset(OpARM64MOVDconst) 26212 v.AuxInt = 0 26213 return true 26214 } 26215 // match: (XOR x (MVN y)) 26216 // result: (EON x y) 26217 for { 26218 _ = v.Args[1] 26219 x := v.Args[0] 26220 v_1 := v.Args[1] 26221 if v_1.Op != OpARM64MVN { 26222 break 26223 } 26224 y := v_1.Args[0] 26225 v.reset(OpARM64EON) 26226 v.AddArg(x) 26227 v.AddArg(y) 26228 return true 26229 } 26230 // match: (XOR (MVN y) x) 26231 // result: (EON x y) 26232 for { 26233 x := v.Args[1] 26234 v_0 := v.Args[0] 26235 if v_0.Op != OpARM64MVN { 26236 break 26237 } 26238 y := v_0.Args[0] 26239 v.reset(OpARM64EON) 26240 v.AddArg(x) 26241 v.AddArg(y) 26242 return true 26243 } 26244 // match: (XOR x0 x1:(SLLconst [c] y)) 26245 // cond: clobberIfDead(x1) 26246 // result: (XORshiftLL x0 y [c]) 26247 for { 26248 _ = v.Args[1] 26249 x0 := v.Args[0] 26250 x1 := v.Args[1] 26251 if x1.Op != OpARM64SLLconst { 26252 break 26253 } 26254 c := x1.AuxInt 26255 y := x1.Args[0] 26256 if !(clobberIfDead(x1)) { 26257 break 26258 } 26259 v.reset(OpARM64XORshiftLL) 26260 v.AuxInt = c 26261 v.AddArg(x0) 26262 v.AddArg(y) 26263 return true 26264 } 26265 // match: (XOR x1:(SLLconst [c] y) x0) 26266 // cond: clobberIfDead(x1) 26267 // result: (XORshiftLL x0 y [c]) 26268 for { 26269 x0 := v.Args[1] 26270 x1 := v.Args[0] 26271 if x1.Op != OpARM64SLLconst { 26272 break 26273 } 26274 c := x1.AuxInt 26275 y := x1.Args[0] 26276 if !(clobberIfDead(x1)) { 26277 break 26278 } 26279 v.reset(OpARM64XORshiftLL) 26280 v.AuxInt = c 26281 v.AddArg(x0) 26282 v.AddArg(y) 26283 return true 26284 } 26285 // match: (XOR x0 x1:(SRLconst [c] y)) 26286 // cond: clobberIfDead(x1) 26287 // 
result: (XORshiftRL x0 y [c]) 26288 for { 26289 _ = v.Args[1] 26290 x0 := v.Args[0] 26291 x1 := v.Args[1] 26292 if x1.Op != OpARM64SRLconst { 26293 break 26294 } 26295 c := x1.AuxInt 26296 y := x1.Args[0] 26297 if !(clobberIfDead(x1)) { 26298 break 26299 } 26300 v.reset(OpARM64XORshiftRL) 26301 v.AuxInt = c 26302 v.AddArg(x0) 26303 v.AddArg(y) 26304 return true 26305 } 26306 // match: (XOR x1:(SRLconst [c] y) x0) 26307 // cond: clobberIfDead(x1) 26308 // result: (XORshiftRL x0 y [c]) 26309 for { 26310 x0 := v.Args[1] 26311 x1 := v.Args[0] 26312 if x1.Op != OpARM64SRLconst { 26313 break 26314 } 26315 c := x1.AuxInt 26316 y := x1.Args[0] 26317 if !(clobberIfDead(x1)) { 26318 break 26319 } 26320 v.reset(OpARM64XORshiftRL) 26321 v.AuxInt = c 26322 v.AddArg(x0) 26323 v.AddArg(y) 26324 return true 26325 } 26326 // match: (XOR x0 x1:(SRAconst [c] y)) 26327 // cond: clobberIfDead(x1) 26328 // result: (XORshiftRA x0 y [c]) 26329 for { 26330 _ = v.Args[1] 26331 x0 := v.Args[0] 26332 x1 := v.Args[1] 26333 if x1.Op != OpARM64SRAconst { 26334 break 26335 } 26336 c := x1.AuxInt 26337 y := x1.Args[0] 26338 if !(clobberIfDead(x1)) { 26339 break 26340 } 26341 v.reset(OpARM64XORshiftRA) 26342 v.AuxInt = c 26343 v.AddArg(x0) 26344 v.AddArg(y) 26345 return true 26346 } 26347 return false 26348 } 26349 func rewriteValueARM64_OpARM64XOR_10(v *Value) bool { 26350 b := v.Block 26351 typ := &b.Func.Config.Types 26352 // match: (XOR x1:(SRAconst [c] y) x0) 26353 // cond: clobberIfDead(x1) 26354 // result: (XORshiftRA x0 y [c]) 26355 for { 26356 x0 := v.Args[1] 26357 x1 := v.Args[0] 26358 if x1.Op != OpARM64SRAconst { 26359 break 26360 } 26361 c := x1.AuxInt 26362 y := x1.Args[0] 26363 if !(clobberIfDead(x1)) { 26364 break 26365 } 26366 v.reset(OpARM64XORshiftRA) 26367 v.AuxInt = c 26368 v.AddArg(x0) 26369 v.AddArg(y) 26370 return true 26371 } 26372 // match: (XOR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) 
(CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 26373 // cond: cc.(Op) == OpARM64LessThanU 26374 // result: (ROR x (NEG <t> y)) 26375 for { 26376 _ = v.Args[1] 26377 v_0 := v.Args[0] 26378 if v_0.Op != OpARM64SLL { 26379 break 26380 } 26381 _ = v_0.Args[1] 26382 x := v_0.Args[0] 26383 v_0_1 := v_0.Args[1] 26384 if v_0_1.Op != OpARM64ANDconst { 26385 break 26386 } 26387 t := v_0_1.Type 26388 if v_0_1.AuxInt != 63 { 26389 break 26390 } 26391 y := v_0_1.Args[0] 26392 v_1 := v.Args[1] 26393 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 { 26394 break 26395 } 26396 cc := v_1.Aux 26397 _ = v_1.Args[1] 26398 v_1_0 := v_1.Args[0] 26399 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt64 { 26400 break 26401 } 26402 _ = v_1_0.Args[1] 26403 if x != v_1_0.Args[0] { 26404 break 26405 } 26406 v_1_0_1 := v_1_0.Args[1] 26407 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 26408 break 26409 } 26410 _ = v_1_0_1.Args[1] 26411 v_1_0_1_0 := v_1_0_1.Args[0] 26412 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 { 26413 break 26414 } 26415 v_1_0_1_1 := v_1_0_1.Args[1] 26416 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] { 26417 break 26418 } 26419 v_1_1 := v_1.Args[1] 26420 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 26421 break 26422 } 26423 v_1_1_0 := v_1_1.Args[0] 26424 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 26425 break 26426 } 26427 _ = v_1_1_0.Args[1] 26428 v_1_1_0_0 := v_1_1_0.Args[0] 26429 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 { 26430 break 26431 } 26432 v_1_1_0_1 := v_1_1_0.Args[1] 26433 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26434 break 26435 } 26436 v.reset(OpARM64ROR) 26437 v.AddArg(x) 26438 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 26439 v0.AddArg(y) 26440 v.AddArg(v0) 26441 return true 26442 } 26443 // match: (XOR (CSEL0 
<typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 26444 // cond: cc.(Op) == OpARM64LessThanU 26445 // result: (ROR x (NEG <t> y)) 26446 for { 26447 _ = v.Args[1] 26448 v_0 := v.Args[0] 26449 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 { 26450 break 26451 } 26452 cc := v_0.Aux 26453 _ = v_0.Args[1] 26454 v_0_0 := v_0.Args[0] 26455 if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt64 { 26456 break 26457 } 26458 _ = v_0_0.Args[1] 26459 x := v_0_0.Args[0] 26460 v_0_0_1 := v_0_0.Args[1] 26461 if v_0_0_1.Op != OpARM64SUB { 26462 break 26463 } 26464 t := v_0_0_1.Type 26465 _ = v_0_0_1.Args[1] 26466 v_0_0_1_0 := v_0_0_1.Args[0] 26467 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 { 26468 break 26469 } 26470 v_0_0_1_1 := v_0_0_1.Args[1] 26471 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 { 26472 break 26473 } 26474 y := v_0_0_1_1.Args[0] 26475 v_0_1 := v_0.Args[1] 26476 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 26477 break 26478 } 26479 v_0_1_0 := v_0_1.Args[0] 26480 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 26481 break 26482 } 26483 _ = v_0_1_0.Args[1] 26484 v_0_1_0_0 := v_0_1_0.Args[0] 26485 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 { 26486 break 26487 } 26488 v_0_1_0_1 := v_0_1_0.Args[1] 26489 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] { 26490 break 26491 } 26492 v_1 := v.Args[1] 26493 if v_1.Op != OpARM64SLL { 26494 break 26495 } 26496 _ = v_1.Args[1] 26497 if x != v_1.Args[0] { 26498 break 26499 } 26500 v_1_1 := v_1.Args[1] 26501 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26502 break 26503 } 26504 v.reset(OpARM64ROR) 26505 v.AddArg(x) 26506 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 
26507 v0.AddArg(y) 26508 v.AddArg(v0) 26509 return true 26510 } 26511 // match: (XOR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 26512 // cond: cc.(Op) == OpARM64LessThanU 26513 // result: (ROR x y) 26514 for { 26515 _ = v.Args[1] 26516 v_0 := v.Args[0] 26517 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt64 { 26518 break 26519 } 26520 _ = v_0.Args[1] 26521 x := v_0.Args[0] 26522 v_0_1 := v_0.Args[1] 26523 if v_0_1.Op != OpARM64ANDconst { 26524 break 26525 } 26526 t := v_0_1.Type 26527 if v_0_1.AuxInt != 63 { 26528 break 26529 } 26530 y := v_0_1.Args[0] 26531 v_1 := v.Args[1] 26532 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt64 { 26533 break 26534 } 26535 cc := v_1.Aux 26536 _ = v_1.Args[1] 26537 v_1_0 := v_1.Args[0] 26538 if v_1_0.Op != OpARM64SLL { 26539 break 26540 } 26541 _ = v_1_0.Args[1] 26542 if x != v_1_0.Args[0] { 26543 break 26544 } 26545 v_1_0_1 := v_1_0.Args[1] 26546 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 26547 break 26548 } 26549 _ = v_1_0_1.Args[1] 26550 v_1_0_1_0 := v_1_0_1.Args[0] 26551 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 64 { 26552 break 26553 } 26554 v_1_0_1_1 := v_1_0_1.Args[1] 26555 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 63 || y != v_1_0_1_1.Args[0] { 26556 break 26557 } 26558 v_1_1 := v_1.Args[1] 26559 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 26560 break 26561 } 26562 v_1_1_0 := v_1_1.Args[0] 26563 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 26564 break 26565 } 26566 _ = v_1_1_0.Args[1] 26567 v_1_1_0_0 := v_1_1_0.Args[0] 26568 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 64 { 26569 break 26570 } 26571 v_1_1_0_1 := v_1_1_0.Args[1] 26572 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 63 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26573 
break 26574 } 26575 v.reset(OpARM64ROR) 26576 v.AddArg(x) 26577 v.AddArg(y) 26578 return true 26579 } 26580 // match: (XOR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 26581 // cond: cc.(Op) == OpARM64LessThanU 26582 // result: (ROR x y) 26583 for { 26584 _ = v.Args[1] 26585 v_0 := v.Args[0] 26586 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt64 { 26587 break 26588 } 26589 cc := v_0.Aux 26590 _ = v_0.Args[1] 26591 v_0_0 := v_0.Args[0] 26592 if v_0_0.Op != OpARM64SLL { 26593 break 26594 } 26595 _ = v_0_0.Args[1] 26596 x := v_0_0.Args[0] 26597 v_0_0_1 := v_0_0.Args[1] 26598 if v_0_0_1.Op != OpARM64SUB { 26599 break 26600 } 26601 t := v_0_0_1.Type 26602 _ = v_0_0_1.Args[1] 26603 v_0_0_1_0 := v_0_0_1.Args[0] 26604 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 64 { 26605 break 26606 } 26607 v_0_0_1_1 := v_0_0_1.Args[1] 26608 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 63 { 26609 break 26610 } 26611 y := v_0_0_1_1.Args[0] 26612 v_0_1 := v_0.Args[1] 26613 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 26614 break 26615 } 26616 v_0_1_0 := v_0_1.Args[0] 26617 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 26618 break 26619 } 26620 _ = v_0_1_0.Args[1] 26621 v_0_1_0_0 := v_0_1_0.Args[0] 26622 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 64 { 26623 break 26624 } 26625 v_0_1_0_1 := v_0_1_0.Args[1] 26626 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 63 || y != v_0_1_0_1.Args[0] { 26627 break 26628 } 26629 v_1 := v.Args[1] 26630 if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt64 { 26631 break 26632 } 26633 _ = v_1.Args[1] 26634 if x != v_1.Args[0] { 26635 break 26636 } 26637 v_1_1 := v_1.Args[1] 26638 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 63 || y != v_1_1.Args[0] || !(cc.(Op) == 
OpARM64LessThanU) { 26639 break 26640 } 26641 v.reset(OpARM64ROR) 26642 v.AddArg(x) 26643 v.AddArg(y) 26644 return true 26645 } 26646 // match: (XOR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 26647 // cond: cc.(Op) == OpARM64LessThanU 26648 // result: (RORW x (NEG <t> y)) 26649 for { 26650 _ = v.Args[1] 26651 v_0 := v.Args[0] 26652 if v_0.Op != OpARM64SLL { 26653 break 26654 } 26655 _ = v_0.Args[1] 26656 x := v_0.Args[0] 26657 v_0_1 := v_0.Args[1] 26658 if v_0_1.Op != OpARM64ANDconst { 26659 break 26660 } 26661 t := v_0_1.Type 26662 if v_0_1.AuxInt != 31 { 26663 break 26664 } 26665 y := v_0_1.Args[0] 26666 v_1 := v.Args[1] 26667 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 { 26668 break 26669 } 26670 cc := v_1.Aux 26671 _ = v_1.Args[1] 26672 v_1_0 := v_1.Args[0] 26673 if v_1_0.Op != OpARM64SRL || v_1_0.Type != typ.UInt32 { 26674 break 26675 } 26676 _ = v_1_0.Args[1] 26677 v_1_0_0 := v_1_0.Args[0] 26678 if v_1_0_0.Op != OpARM64MOVWUreg || x != v_1_0_0.Args[0] { 26679 break 26680 } 26681 v_1_0_1 := v_1_0.Args[1] 26682 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 26683 break 26684 } 26685 _ = v_1_0_1.Args[1] 26686 v_1_0_1_0 := v_1_0_1.Args[0] 26687 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 { 26688 break 26689 } 26690 v_1_0_1_1 := v_1_0_1.Args[1] 26691 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] { 26692 break 26693 } 26694 v_1_1 := v_1.Args[1] 26695 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 26696 break 26697 } 26698 v_1_1_0 := v_1_1.Args[0] 26699 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 26700 break 26701 } 26702 _ = v_1_1_0.Args[1] 26703 v_1_1_0_0 := v_1_1_0.Args[0] 26704 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 { 26705 break 26706 } 26707 v_1_1_0_1 := v_1_1_0.Args[1] 
26708 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26709 break 26710 } 26711 v.reset(OpARM64RORW) 26712 v.AddArg(x) 26713 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 26714 v0.AddArg(y) 26715 v.AddArg(v0) 26716 return true 26717 } 26718 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 26719 // cond: cc.(Op) == OpARM64LessThanU 26720 // result: (RORW x (NEG <t> y)) 26721 for { 26722 _ = v.Args[1] 26723 v_0 := v.Args[0] 26724 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 { 26725 break 26726 } 26727 cc := v_0.Aux 26728 _ = v_0.Args[1] 26729 v_0_0 := v_0.Args[0] 26730 if v_0_0.Op != OpARM64SRL || v_0_0.Type != typ.UInt32 { 26731 break 26732 } 26733 _ = v_0_0.Args[1] 26734 v_0_0_0 := v_0_0.Args[0] 26735 if v_0_0_0.Op != OpARM64MOVWUreg { 26736 break 26737 } 26738 x := v_0_0_0.Args[0] 26739 v_0_0_1 := v_0_0.Args[1] 26740 if v_0_0_1.Op != OpARM64SUB { 26741 break 26742 } 26743 t := v_0_0_1.Type 26744 _ = v_0_0_1.Args[1] 26745 v_0_0_1_0 := v_0_0_1.Args[0] 26746 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 { 26747 break 26748 } 26749 v_0_0_1_1 := v_0_0_1.Args[1] 26750 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 { 26751 break 26752 } 26753 y := v_0_0_1_1.Args[0] 26754 v_0_1 := v_0.Args[1] 26755 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 26756 break 26757 } 26758 v_0_1_0 := v_0_1.Args[0] 26759 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 26760 break 26761 } 26762 _ = v_0_1_0.Args[1] 26763 v_0_1_0_0 := v_0_1_0.Args[0] 26764 if v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 { 26765 break 26766 } 26767 v_0_1_0_1 := v_0_1_0.Args[1] 26768 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != 
v_0_1_0_1.Args[0] { 26769 break 26770 } 26771 v_1 := v.Args[1] 26772 if v_1.Op != OpARM64SLL { 26773 break 26774 } 26775 _ = v_1.Args[1] 26776 if x != v_1.Args[0] { 26777 break 26778 } 26779 v_1_1 := v_1.Args[1] 26780 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26781 break 26782 } 26783 v.reset(OpARM64RORW) 26784 v.AddArg(x) 26785 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 26786 v0.AddArg(y) 26787 v.AddArg(v0) 26788 return true 26789 } 26790 // match: (XOR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 26791 // cond: cc.(Op) == OpARM64LessThanU 26792 // result: (RORW x y) 26793 for { 26794 _ = v.Args[1] 26795 v_0 := v.Args[0] 26796 if v_0.Op != OpARM64SRL || v_0.Type != typ.UInt32 { 26797 break 26798 } 26799 _ = v_0.Args[1] 26800 v_0_0 := v_0.Args[0] 26801 if v_0_0.Op != OpARM64MOVWUreg { 26802 break 26803 } 26804 x := v_0_0.Args[0] 26805 v_0_1 := v_0.Args[1] 26806 if v_0_1.Op != OpARM64ANDconst { 26807 break 26808 } 26809 t := v_0_1.Type 26810 if v_0_1.AuxInt != 31 { 26811 break 26812 } 26813 y := v_0_1.Args[0] 26814 v_1 := v.Args[1] 26815 if v_1.Op != OpARM64CSEL0 || v_1.Type != typ.UInt32 { 26816 break 26817 } 26818 cc := v_1.Aux 26819 _ = v_1.Args[1] 26820 v_1_0 := v_1.Args[0] 26821 if v_1_0.Op != OpARM64SLL { 26822 break 26823 } 26824 _ = v_1_0.Args[1] 26825 if x != v_1_0.Args[0] { 26826 break 26827 } 26828 v_1_0_1 := v_1_0.Args[1] 26829 if v_1_0_1.Op != OpARM64SUB || v_1_0_1.Type != t { 26830 break 26831 } 26832 _ = v_1_0_1.Args[1] 26833 v_1_0_1_0 := v_1_0_1.Args[0] 26834 if v_1_0_1_0.Op != OpARM64MOVDconst || v_1_0_1_0.AuxInt != 32 { 26835 break 26836 } 26837 v_1_0_1_1 := v_1_0_1.Args[1] 26838 if v_1_0_1_1.Op != OpARM64ANDconst || v_1_0_1_1.Type != t || v_1_0_1_1.AuxInt != 31 || y != v_1_0_1_1.Args[0] { 26839 break 26840 } 
26841 v_1_1 := v_1.Args[1] 26842 if v_1_1.Op != OpARM64CMPconst || v_1_1.AuxInt != 64 { 26843 break 26844 } 26845 v_1_1_0 := v_1_1.Args[0] 26846 if v_1_1_0.Op != OpARM64SUB || v_1_1_0.Type != t { 26847 break 26848 } 26849 _ = v_1_1_0.Args[1] 26850 v_1_1_0_0 := v_1_1_0.Args[0] 26851 if v_1_1_0_0.Op != OpARM64MOVDconst || v_1_1_0_0.AuxInt != 32 { 26852 break 26853 } 26854 v_1_1_0_1 := v_1_1_0.Args[1] 26855 if v_1_1_0_1.Op != OpARM64ANDconst || v_1_1_0_1.Type != t || v_1_1_0_1.AuxInt != 31 || y != v_1_1_0_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26856 break 26857 } 26858 v.reset(OpARM64RORW) 26859 v.AddArg(x) 26860 v.AddArg(y) 26861 return true 26862 } 26863 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 26864 // cond: cc.(Op) == OpARM64LessThanU 26865 // result: (RORW x y) 26866 for { 26867 _ = v.Args[1] 26868 v_0 := v.Args[0] 26869 if v_0.Op != OpARM64CSEL0 || v_0.Type != typ.UInt32 { 26870 break 26871 } 26872 cc := v_0.Aux 26873 _ = v_0.Args[1] 26874 v_0_0 := v_0.Args[0] 26875 if v_0_0.Op != OpARM64SLL { 26876 break 26877 } 26878 _ = v_0_0.Args[1] 26879 x := v_0_0.Args[0] 26880 v_0_0_1 := v_0_0.Args[1] 26881 if v_0_0_1.Op != OpARM64SUB { 26882 break 26883 } 26884 t := v_0_0_1.Type 26885 _ = v_0_0_1.Args[1] 26886 v_0_0_1_0 := v_0_0_1.Args[0] 26887 if v_0_0_1_0.Op != OpARM64MOVDconst || v_0_0_1_0.AuxInt != 32 { 26888 break 26889 } 26890 v_0_0_1_1 := v_0_0_1.Args[1] 26891 if v_0_0_1_1.Op != OpARM64ANDconst || v_0_0_1_1.Type != t || v_0_0_1_1.AuxInt != 31 { 26892 break 26893 } 26894 y := v_0_0_1_1.Args[0] 26895 v_0_1 := v_0.Args[1] 26896 if v_0_1.Op != OpARM64CMPconst || v_0_1.AuxInt != 64 { 26897 break 26898 } 26899 v_0_1_0 := v_0_1.Args[0] 26900 if v_0_1_0.Op != OpARM64SUB || v_0_1_0.Type != t { 26901 break 26902 } 26903 _ = v_0_1_0.Args[1] 26904 v_0_1_0_0 := v_0_1_0.Args[0] 26905 if 
v_0_1_0_0.Op != OpARM64MOVDconst || v_0_1_0_0.AuxInt != 32 { 26906 break 26907 } 26908 v_0_1_0_1 := v_0_1_0.Args[1] 26909 if v_0_1_0_1.Op != OpARM64ANDconst || v_0_1_0_1.Type != t || v_0_1_0_1.AuxInt != 31 || y != v_0_1_0_1.Args[0] { 26910 break 26911 } 26912 v_1 := v.Args[1] 26913 if v_1.Op != OpARM64SRL || v_1.Type != typ.UInt32 { 26914 break 26915 } 26916 _ = v_1.Args[1] 26917 v_1_0 := v_1.Args[0] 26918 if v_1_0.Op != OpARM64MOVWUreg || x != v_1_0.Args[0] { 26919 break 26920 } 26921 v_1_1 := v_1.Args[1] 26922 if v_1_1.Op != OpARM64ANDconst || v_1_1.Type != t || v_1_1.AuxInt != 31 || y != v_1_1.Args[0] || !(cc.(Op) == OpARM64LessThanU) { 26923 break 26924 } 26925 v.reset(OpARM64RORW) 26926 v.AddArg(x) 26927 v.AddArg(y) 26928 return true 26929 } 26930 return false 26931 } 26932 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 26933 // match: (XORconst [0] x) 26934 // result: x 26935 for { 26936 if v.AuxInt != 0 { 26937 break 26938 } 26939 x := v.Args[0] 26940 v.reset(OpCopy) 26941 v.Type = x.Type 26942 v.AddArg(x) 26943 return true 26944 } 26945 // match: (XORconst [-1] x) 26946 // result: (MVN x) 26947 for { 26948 if v.AuxInt != -1 { 26949 break 26950 } 26951 x := v.Args[0] 26952 v.reset(OpARM64MVN) 26953 v.AddArg(x) 26954 return true 26955 } 26956 // match: (XORconst [c] (MOVDconst [d])) 26957 // result: (MOVDconst [c^d]) 26958 for { 26959 c := v.AuxInt 26960 v_0 := v.Args[0] 26961 if v_0.Op != OpARM64MOVDconst { 26962 break 26963 } 26964 d := v_0.AuxInt 26965 v.reset(OpARM64MOVDconst) 26966 v.AuxInt = c ^ d 26967 return true 26968 } 26969 // match: (XORconst [c] (XORconst [d] x)) 26970 // result: (XORconst [c^d] x) 26971 for { 26972 c := v.AuxInt 26973 v_0 := v.Args[0] 26974 if v_0.Op != OpARM64XORconst { 26975 break 26976 } 26977 d := v_0.AuxInt 26978 x := v_0.Args[0] 26979 v.reset(OpARM64XORconst) 26980 v.AuxInt = c ^ d 26981 v.AddArg(x) 26982 return true 26983 } 26984 return false 26985 } 26986 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) 
bool { 26987 b := v.Block 26988 typ := &b.Func.Config.Types 26989 // match: (XORshiftLL (MOVDconst [c]) x [d]) 26990 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 26991 for { 26992 d := v.AuxInt 26993 x := v.Args[1] 26994 v_0 := v.Args[0] 26995 if v_0.Op != OpARM64MOVDconst { 26996 break 26997 } 26998 c := v_0.AuxInt 26999 v.reset(OpARM64XORconst) 27000 v.AuxInt = c 27001 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 27002 v0.AuxInt = d 27003 v0.AddArg(x) 27004 v.AddArg(v0) 27005 return true 27006 } 27007 // match: (XORshiftLL x (MOVDconst [c]) [d]) 27008 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 27009 for { 27010 d := v.AuxInt 27011 _ = v.Args[1] 27012 x := v.Args[0] 27013 v_1 := v.Args[1] 27014 if v_1.Op != OpARM64MOVDconst { 27015 break 27016 } 27017 c := v_1.AuxInt 27018 v.reset(OpARM64XORconst) 27019 v.AuxInt = int64(uint64(c) << uint64(d)) 27020 v.AddArg(x) 27021 return true 27022 } 27023 // match: (XORshiftLL x (SLLconst x [c]) [d]) 27024 // cond: c==d 27025 // result: (MOVDconst [0]) 27026 for { 27027 d := v.AuxInt 27028 _ = v.Args[1] 27029 x := v.Args[0] 27030 v_1 := v.Args[1] 27031 if v_1.Op != OpARM64SLLconst { 27032 break 27033 } 27034 c := v_1.AuxInt 27035 if x != v_1.Args[0] || !(c == d) { 27036 break 27037 } 27038 v.reset(OpARM64MOVDconst) 27039 v.AuxInt = 0 27040 return true 27041 } 27042 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 27043 // result: (RORconst [64-c] x) 27044 for { 27045 c := v.AuxInt 27046 x := v.Args[1] 27047 v_0 := v.Args[0] 27048 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 27049 break 27050 } 27051 v.reset(OpARM64RORconst) 27052 v.AuxInt = 64 - c 27053 v.AddArg(x) 27054 return true 27055 } 27056 // match: (XORshiftLL <t> [c] (UBFX [bfc] x) x) 27057 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 27058 // result: (RORWconst [32-c] x) 27059 for { 27060 t := v.Type 27061 c := v.AuxInt 27062 x := v.Args[1] 27063 v_0 := v.Args[0] 27064 if v_0.Op != OpARM64UBFX { 
27065 break 27066 } 27067 bfc := v_0.AuxInt 27068 if x != v_0.Args[0] || !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 27069 break 27070 } 27071 v.reset(OpARM64RORWconst) 27072 v.AuxInt = 32 - c 27073 v.AddArg(x) 27074 return true 27075 } 27076 // match: (XORshiftLL <typ.UInt16> [8] (UBFX <typ.UInt16> [armBFAuxInt(8, 8)] x) x) 27077 // result: (REV16W x) 27078 for { 27079 if v.Type != typ.UInt16 || v.AuxInt != 8 { 27080 break 27081 } 27082 x := v.Args[1] 27083 v_0 := v.Args[0] 27084 if v_0.Op != OpARM64UBFX || v_0.Type != typ.UInt16 || v_0.AuxInt != armBFAuxInt(8, 8) || x != v_0.Args[0] { 27085 break 27086 } 27087 v.reset(OpARM64REV16W) 27088 v.AddArg(x) 27089 return true 27090 } 27091 // match: (XORshiftLL [c] (SRLconst x [64-c]) x2) 27092 // result: (EXTRconst [64-c] x2 x) 27093 for { 27094 c := v.AuxInt 27095 x2 := v.Args[1] 27096 v_0 := v.Args[0] 27097 if v_0.Op != OpARM64SRLconst || v_0.AuxInt != 64-c { 27098 break 27099 } 27100 x := v_0.Args[0] 27101 v.reset(OpARM64EXTRconst) 27102 v.AuxInt = 64 - c 27103 v.AddArg(x2) 27104 v.AddArg(x) 27105 return true 27106 } 27107 // match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2) 27108 // cond: c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c) 27109 // result: (EXTRWconst [32-c] x2 x) 27110 for { 27111 t := v.Type 27112 c := v.AuxInt 27113 x2 := v.Args[1] 27114 v_0 := v.Args[0] 27115 if v_0.Op != OpARM64UBFX { 27116 break 27117 } 27118 bfc := v_0.AuxInt 27119 x := v_0.Args[0] 27120 if !(c < 32 && t.Size() == 4 && bfc == armBFAuxInt(32-c, c)) { 27121 break 27122 } 27123 v.reset(OpARM64EXTRWconst) 27124 v.AuxInt = 32 - c 27125 v.AddArg(x2) 27126 v.AddArg(x) 27127 return true 27128 } 27129 return false 27130 } 27131 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 27132 b := v.Block 27133 // match: (XORshiftRA (MOVDconst [c]) x [d]) 27134 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 27135 for { 27136 d := v.AuxInt 27137 x := v.Args[1] 27138 v_0 := v.Args[0] 27139 if v_0.Op != 
OpARM64MOVDconst { 27140 break 27141 } 27142 c := v_0.AuxInt 27143 v.reset(OpARM64XORconst) 27144 v.AuxInt = c 27145 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 27146 v0.AuxInt = d 27147 v0.AddArg(x) 27148 v.AddArg(v0) 27149 return true 27150 } 27151 // match: (XORshiftRA x (MOVDconst [c]) [d]) 27152 // result: (XORconst x [c>>uint64(d)]) 27153 for { 27154 d := v.AuxInt 27155 _ = v.Args[1] 27156 x := v.Args[0] 27157 v_1 := v.Args[1] 27158 if v_1.Op != OpARM64MOVDconst { 27159 break 27160 } 27161 c := v_1.AuxInt 27162 v.reset(OpARM64XORconst) 27163 v.AuxInt = c >> uint64(d) 27164 v.AddArg(x) 27165 return true 27166 } 27167 // match: (XORshiftRA x (SRAconst x [c]) [d]) 27168 // cond: c==d 27169 // result: (MOVDconst [0]) 27170 for { 27171 d := v.AuxInt 27172 _ = v.Args[1] 27173 x := v.Args[0] 27174 v_1 := v.Args[1] 27175 if v_1.Op != OpARM64SRAconst { 27176 break 27177 } 27178 c := v_1.AuxInt 27179 if x != v_1.Args[0] || !(c == d) { 27180 break 27181 } 27182 v.reset(OpARM64MOVDconst) 27183 v.AuxInt = 0 27184 return true 27185 } 27186 return false 27187 } 27188 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 27189 b := v.Block 27190 // match: (XORshiftRL (MOVDconst [c]) x [d]) 27191 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 27192 for { 27193 d := v.AuxInt 27194 x := v.Args[1] 27195 v_0 := v.Args[0] 27196 if v_0.Op != OpARM64MOVDconst { 27197 break 27198 } 27199 c := v_0.AuxInt 27200 v.reset(OpARM64XORconst) 27201 v.AuxInt = c 27202 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 27203 v0.AuxInt = d 27204 v0.AddArg(x) 27205 v.AddArg(v0) 27206 return true 27207 } 27208 // match: (XORshiftRL x (MOVDconst [c]) [d]) 27209 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 27210 for { 27211 d := v.AuxInt 27212 _ = v.Args[1] 27213 x := v.Args[0] 27214 v_1 := v.Args[1] 27215 if v_1.Op != OpARM64MOVDconst { 27216 break 27217 } 27218 c := v_1.AuxInt 27219 v.reset(OpARM64XORconst) 27220 v.AuxInt = int64(uint64(c) >> uint64(d)) 27221 v.AddArg(x) 
27222 return true 27223 } 27224 // match: (XORshiftRL x (SRLconst x [c]) [d]) 27225 // cond: c==d 27226 // result: (MOVDconst [0]) 27227 for { 27228 d := v.AuxInt 27229 _ = v.Args[1] 27230 x := v.Args[0] 27231 v_1 := v.Args[1] 27232 if v_1.Op != OpARM64SRLconst { 27233 break 27234 } 27235 c := v_1.AuxInt 27236 if x != v_1.Args[0] || !(c == d) { 27237 break 27238 } 27239 v.reset(OpARM64MOVDconst) 27240 v.AuxInt = 0 27241 return true 27242 } 27243 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 27244 // result: (RORconst [ c] x) 27245 for { 27246 c := v.AuxInt 27247 x := v.Args[1] 27248 v_0 := v.Args[0] 27249 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 64-c || x != v_0.Args[0] { 27250 break 27251 } 27252 v.reset(OpARM64RORconst) 27253 v.AuxInt = c 27254 v.AddArg(x) 27255 return true 27256 } 27257 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 27258 // cond: c < 32 && t.Size() == 4 27259 // result: (RORWconst [c] x) 27260 for { 27261 t := v.Type 27262 c := v.AuxInt 27263 _ = v.Args[1] 27264 v_0 := v.Args[0] 27265 if v_0.Op != OpARM64SLLconst || v_0.AuxInt != 32-c { 27266 break 27267 } 27268 x := v_0.Args[0] 27269 v_1 := v.Args[1] 27270 if v_1.Op != OpARM64MOVWUreg || x != v_1.Args[0] || !(c < 32 && t.Size() == 4) { 27271 break 27272 } 27273 v.reset(OpARM64RORWconst) 27274 v.AuxInt = c 27275 v.AddArg(x) 27276 return true 27277 } 27278 return false 27279 } 27280 func rewriteValueARM64_OpAbs_0(v *Value) bool { 27281 // match: (Abs x) 27282 // result: (FABSD x) 27283 for { 27284 x := v.Args[0] 27285 v.reset(OpARM64FABSD) 27286 v.AddArg(x) 27287 return true 27288 } 27289 } 27290 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 27291 // match: (Add16 x y) 27292 // result: (ADD x y) 27293 for { 27294 y := v.Args[1] 27295 x := v.Args[0] 27296 v.reset(OpARM64ADD) 27297 v.AddArg(x) 27298 v.AddArg(y) 27299 return true 27300 } 27301 } 27302 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 27303 // match: (Add32 x y) 27304 // result: (ADD x y) 27305 for { 
27306 y := v.Args[1] 27307 x := v.Args[0] 27308 v.reset(OpARM64ADD) 27309 v.AddArg(x) 27310 v.AddArg(y) 27311 return true 27312 } 27313 } 27314 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 27315 // match: (Add32F x y) 27316 // result: (FADDS x y) 27317 for { 27318 y := v.Args[1] 27319 x := v.Args[0] 27320 v.reset(OpARM64FADDS) 27321 v.AddArg(x) 27322 v.AddArg(y) 27323 return true 27324 } 27325 } 27326 func rewriteValueARM64_OpAdd64_0(v *Value) bool { 27327 // match: (Add64 x y) 27328 // result: (ADD x y) 27329 for { 27330 y := v.Args[1] 27331 x := v.Args[0] 27332 v.reset(OpARM64ADD) 27333 v.AddArg(x) 27334 v.AddArg(y) 27335 return true 27336 } 27337 } 27338 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 27339 // match: (Add64F x y) 27340 // result: (FADDD x y) 27341 for { 27342 y := v.Args[1] 27343 x := v.Args[0] 27344 v.reset(OpARM64FADDD) 27345 v.AddArg(x) 27346 v.AddArg(y) 27347 return true 27348 } 27349 } 27350 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 27351 // match: (Add8 x y) 27352 // result: (ADD x y) 27353 for { 27354 y := v.Args[1] 27355 x := v.Args[0] 27356 v.reset(OpARM64ADD) 27357 v.AddArg(x) 27358 v.AddArg(y) 27359 return true 27360 } 27361 } 27362 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 27363 // match: (AddPtr x y) 27364 // result: (ADD x y) 27365 for { 27366 y := v.Args[1] 27367 x := v.Args[0] 27368 v.reset(OpARM64ADD) 27369 v.AddArg(x) 27370 v.AddArg(y) 27371 return true 27372 } 27373 } 27374 func rewriteValueARM64_OpAddr_0(v *Value) bool { 27375 // match: (Addr {sym} base) 27376 // result: (MOVDaddr {sym} base) 27377 for { 27378 sym := v.Aux 27379 base := v.Args[0] 27380 v.reset(OpARM64MOVDaddr) 27381 v.Aux = sym 27382 v.AddArg(base) 27383 return true 27384 } 27385 } 27386 func rewriteValueARM64_OpAnd16_0(v *Value) bool { 27387 // match: (And16 x y) 27388 // result: (AND x y) 27389 for { 27390 y := v.Args[1] 27391 x := v.Args[0] 27392 v.reset(OpARM64AND) 27393 v.AddArg(x) 27394 v.AddArg(y) 27395 return true 27396 } 
27397 } 27398 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 27399 // match: (And32 x y) 27400 // result: (AND x y) 27401 for { 27402 y := v.Args[1] 27403 x := v.Args[0] 27404 v.reset(OpARM64AND) 27405 v.AddArg(x) 27406 v.AddArg(y) 27407 return true 27408 } 27409 } 27410 func rewriteValueARM64_OpAnd64_0(v *Value) bool { 27411 // match: (And64 x y) 27412 // result: (AND x y) 27413 for { 27414 y := v.Args[1] 27415 x := v.Args[0] 27416 v.reset(OpARM64AND) 27417 v.AddArg(x) 27418 v.AddArg(y) 27419 return true 27420 } 27421 } 27422 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 27423 // match: (And8 x y) 27424 // result: (AND x y) 27425 for { 27426 y := v.Args[1] 27427 x := v.Args[0] 27428 v.reset(OpARM64AND) 27429 v.AddArg(x) 27430 v.AddArg(y) 27431 return true 27432 } 27433 } 27434 func rewriteValueARM64_OpAndB_0(v *Value) bool { 27435 // match: (AndB x y) 27436 // result: (AND x y) 27437 for { 27438 y := v.Args[1] 27439 x := v.Args[0] 27440 v.reset(OpARM64AND) 27441 v.AddArg(x) 27442 v.AddArg(y) 27443 return true 27444 } 27445 } 27446 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 27447 // match: (AtomicAdd32 ptr val mem) 27448 // result: (LoweredAtomicAdd32 ptr val mem) 27449 for { 27450 mem := v.Args[2] 27451 ptr := v.Args[0] 27452 val := v.Args[1] 27453 v.reset(OpARM64LoweredAtomicAdd32) 27454 v.AddArg(ptr) 27455 v.AddArg(val) 27456 v.AddArg(mem) 27457 return true 27458 } 27459 } 27460 func rewriteValueARM64_OpAtomicAdd32Variant_0(v *Value) bool { 27461 // match: (AtomicAdd32Variant ptr val mem) 27462 // result: (LoweredAtomicAdd32Variant ptr val mem) 27463 for { 27464 mem := v.Args[2] 27465 ptr := v.Args[0] 27466 val := v.Args[1] 27467 v.reset(OpARM64LoweredAtomicAdd32Variant) 27468 v.AddArg(ptr) 27469 v.AddArg(val) 27470 v.AddArg(mem) 27471 return true 27472 } 27473 } 27474 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 27475 // match: (AtomicAdd64 ptr val mem) 27476 // result: (LoweredAtomicAdd64 ptr val mem) 27477 for { 27478 mem := 
v.Args[2] 27479 ptr := v.Args[0] 27480 val := v.Args[1] 27481 v.reset(OpARM64LoweredAtomicAdd64) 27482 v.AddArg(ptr) 27483 v.AddArg(val) 27484 v.AddArg(mem) 27485 return true 27486 } 27487 } 27488 func rewriteValueARM64_OpAtomicAdd64Variant_0(v *Value) bool { 27489 // match: (AtomicAdd64Variant ptr val mem) 27490 // result: (LoweredAtomicAdd64Variant ptr val mem) 27491 for { 27492 mem := v.Args[2] 27493 ptr := v.Args[0] 27494 val := v.Args[1] 27495 v.reset(OpARM64LoweredAtomicAdd64Variant) 27496 v.AddArg(ptr) 27497 v.AddArg(val) 27498 v.AddArg(mem) 27499 return true 27500 } 27501 } 27502 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 27503 b := v.Block 27504 typ := &b.Func.Config.Types 27505 // match: (AtomicAnd8 ptr val mem) 27506 // result: (Select1 (LoweredAtomicAnd8 ptr val mem)) 27507 for { 27508 mem := v.Args[2] 27509 ptr := v.Args[0] 27510 val := v.Args[1] 27511 v.reset(OpSelect1) 27512 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem)) 27513 v0.AddArg(ptr) 27514 v0.AddArg(val) 27515 v0.AddArg(mem) 27516 v.AddArg(v0) 27517 return true 27518 } 27519 } 27520 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 27521 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 27522 // result: (LoweredAtomicCas32 ptr old new_ mem) 27523 for { 27524 mem := v.Args[3] 27525 ptr := v.Args[0] 27526 old := v.Args[1] 27527 new_ := v.Args[2] 27528 v.reset(OpARM64LoweredAtomicCas32) 27529 v.AddArg(ptr) 27530 v.AddArg(old) 27531 v.AddArg(new_) 27532 v.AddArg(mem) 27533 return true 27534 } 27535 } 27536 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 27537 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 27538 // result: (LoweredAtomicCas64 ptr old new_ mem) 27539 for { 27540 mem := v.Args[3] 27541 ptr := v.Args[0] 27542 old := v.Args[1] 27543 new_ := v.Args[2] 27544 v.reset(OpARM64LoweredAtomicCas64) 27545 v.AddArg(ptr) 27546 v.AddArg(old) 27547 v.AddArg(new_) 27548 v.AddArg(mem) 27549 return 
true 27550 } 27551 } 27552 func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool { 27553 // match: (AtomicExchange32 ptr val mem) 27554 // result: (LoweredAtomicExchange32 ptr val mem) 27555 for { 27556 mem := v.Args[2] 27557 ptr := v.Args[0] 27558 val := v.Args[1] 27559 v.reset(OpARM64LoweredAtomicExchange32) 27560 v.AddArg(ptr) 27561 v.AddArg(val) 27562 v.AddArg(mem) 27563 return true 27564 } 27565 } 27566 func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool { 27567 // match: (AtomicExchange64 ptr val mem) 27568 // result: (LoweredAtomicExchange64 ptr val mem) 27569 for { 27570 mem := v.Args[2] 27571 ptr := v.Args[0] 27572 val := v.Args[1] 27573 v.reset(OpARM64LoweredAtomicExchange64) 27574 v.AddArg(ptr) 27575 v.AddArg(val) 27576 v.AddArg(mem) 27577 return true 27578 } 27579 } 27580 func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool { 27581 // match: (AtomicLoad32 ptr mem) 27582 // result: (LDARW ptr mem) 27583 for { 27584 mem := v.Args[1] 27585 ptr := v.Args[0] 27586 v.reset(OpARM64LDARW) 27587 v.AddArg(ptr) 27588 v.AddArg(mem) 27589 return true 27590 } 27591 } 27592 func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool { 27593 // match: (AtomicLoad64 ptr mem) 27594 // result: (LDAR ptr mem) 27595 for { 27596 mem := v.Args[1] 27597 ptr := v.Args[0] 27598 v.reset(OpARM64LDAR) 27599 v.AddArg(ptr) 27600 v.AddArg(mem) 27601 return true 27602 } 27603 } 27604 func rewriteValueARM64_OpAtomicLoad8_0(v *Value) bool { 27605 // match: (AtomicLoad8 ptr mem) 27606 // result: (LDARB ptr mem) 27607 for { 27608 mem := v.Args[1] 27609 ptr := v.Args[0] 27610 v.reset(OpARM64LDARB) 27611 v.AddArg(ptr) 27612 v.AddArg(mem) 27613 return true 27614 } 27615 } 27616 func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool { 27617 // match: (AtomicLoadPtr ptr mem) 27618 // result: (LDAR ptr mem) 27619 for { 27620 mem := v.Args[1] 27621 ptr := v.Args[0] 27622 v.reset(OpARM64LDAR) 27623 v.AddArg(ptr) 27624 v.AddArg(mem) 27625 return true 27626 } 27627 } 27628 func 
rewriteValueARM64_OpAtomicOr8_0(v *Value) bool { 27629 b := v.Block 27630 typ := &b.Func.Config.Types 27631 // match: (AtomicOr8 ptr val mem) 27632 // result: (Select1 (LoweredAtomicOr8 ptr val mem)) 27633 for { 27634 mem := v.Args[2] 27635 ptr := v.Args[0] 27636 val := v.Args[1] 27637 v.reset(OpSelect1) 27638 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem)) 27639 v0.AddArg(ptr) 27640 v0.AddArg(val) 27641 v0.AddArg(mem) 27642 v.AddArg(v0) 27643 return true 27644 } 27645 } 27646 func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool { 27647 // match: (AtomicStore32 ptr val mem) 27648 // result: (STLRW ptr val mem) 27649 for { 27650 mem := v.Args[2] 27651 ptr := v.Args[0] 27652 val := v.Args[1] 27653 v.reset(OpARM64STLRW) 27654 v.AddArg(ptr) 27655 v.AddArg(val) 27656 v.AddArg(mem) 27657 return true 27658 } 27659 } 27660 func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool { 27661 // match: (AtomicStore64 ptr val mem) 27662 // result: (STLR ptr val mem) 27663 for { 27664 mem := v.Args[2] 27665 ptr := v.Args[0] 27666 val := v.Args[1] 27667 v.reset(OpARM64STLR) 27668 v.AddArg(ptr) 27669 v.AddArg(val) 27670 v.AddArg(mem) 27671 return true 27672 } 27673 } 27674 func rewriteValueARM64_OpAtomicStore8_0(v *Value) bool { 27675 // match: (AtomicStore8 ptr val mem) 27676 // result: (STLRB ptr val mem) 27677 for { 27678 mem := v.Args[2] 27679 ptr := v.Args[0] 27680 val := v.Args[1] 27681 v.reset(OpARM64STLRB) 27682 v.AddArg(ptr) 27683 v.AddArg(val) 27684 v.AddArg(mem) 27685 return true 27686 } 27687 } 27688 func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool { 27689 // match: (AtomicStorePtrNoWB ptr val mem) 27690 // result: (STLR ptr val mem) 27691 for { 27692 mem := v.Args[2] 27693 ptr := v.Args[0] 27694 val := v.Args[1] 27695 v.reset(OpARM64STLR) 27696 v.AddArg(ptr) 27697 v.AddArg(val) 27698 v.AddArg(mem) 27699 return true 27700 } 27701 } 27702 func rewriteValueARM64_OpAvg64u_0(v *Value) bool { 27703 b := v.Block 27704 
// match: (Avg64u <t> x y) 27705 // result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y) 27706 for { 27707 t := v.Type 27708 y := v.Args[1] 27709 x := v.Args[0] 27710 v.reset(OpARM64ADD) 27711 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t) 27712 v0.AuxInt = 1 27713 v1 := b.NewValue0(v.Pos, OpARM64SUB, t) 27714 v1.AddArg(x) 27715 v1.AddArg(y) 27716 v0.AddArg(v1) 27717 v.AddArg(v0) 27718 v.AddArg(y) 27719 return true 27720 } 27721 } 27722 func rewriteValueARM64_OpBitLen32_0(v *Value) bool { 27723 b := v.Block 27724 typ := &b.Func.Config.Types 27725 // match: (BitLen32 x) 27726 // result: (SUB (MOVDconst [32]) (CLZW <typ.Int> x)) 27727 for { 27728 x := v.Args[0] 27729 v.reset(OpARM64SUB) 27730 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 27731 v0.AuxInt = 32 27732 v.AddArg(v0) 27733 v1 := b.NewValue0(v.Pos, OpARM64CLZW, typ.Int) 27734 v1.AddArg(x) 27735 v.AddArg(v1) 27736 return true 27737 } 27738 } 27739 func rewriteValueARM64_OpBitLen64_0(v *Value) bool { 27740 b := v.Block 27741 typ := &b.Func.Config.Types 27742 // match: (BitLen64 x) 27743 // result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x)) 27744 for { 27745 x := v.Args[0] 27746 v.reset(OpARM64SUB) 27747 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 27748 v0.AuxInt = 64 27749 v.AddArg(v0) 27750 v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int) 27751 v1.AddArg(x) 27752 v.AddArg(v1) 27753 return true 27754 } 27755 } 27756 func rewriteValueARM64_OpBitRev16_0(v *Value) bool { 27757 b := v.Block 27758 typ := &b.Func.Config.Types 27759 // match: (BitRev16 x) 27760 // result: (SRLconst [48] (RBIT <typ.UInt64> x)) 27761 for { 27762 x := v.Args[0] 27763 v.reset(OpARM64SRLconst) 27764 v.AuxInt = 48 27765 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 27766 v0.AddArg(x) 27767 v.AddArg(v0) 27768 return true 27769 } 27770 } 27771 func rewriteValueARM64_OpBitRev32_0(v *Value) bool { 27772 // match: (BitRev32 x) 27773 // result: (RBITW x) 27774 for { 27775 x := v.Args[0] 27776 v.reset(OpARM64RBITW) 27777 v.AddArg(x) 
27778 return true 27779 } 27780 } 27781 func rewriteValueARM64_OpBitRev64_0(v *Value) bool { 27782 // match: (BitRev64 x) 27783 // result: (RBIT x) 27784 for { 27785 x := v.Args[0] 27786 v.reset(OpARM64RBIT) 27787 v.AddArg(x) 27788 return true 27789 } 27790 } 27791 func rewriteValueARM64_OpBitRev8_0(v *Value) bool { 27792 b := v.Block 27793 typ := &b.Func.Config.Types 27794 // match: (BitRev8 x) 27795 // result: (SRLconst [56] (RBIT <typ.UInt64> x)) 27796 for { 27797 x := v.Args[0] 27798 v.reset(OpARM64SRLconst) 27799 v.AuxInt = 56 27800 v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64) 27801 v0.AddArg(x) 27802 v.AddArg(v0) 27803 return true 27804 } 27805 } 27806 func rewriteValueARM64_OpBswap32_0(v *Value) bool { 27807 // match: (Bswap32 x) 27808 // result: (REVW x) 27809 for { 27810 x := v.Args[0] 27811 v.reset(OpARM64REVW) 27812 v.AddArg(x) 27813 return true 27814 } 27815 } 27816 func rewriteValueARM64_OpBswap64_0(v *Value) bool { 27817 // match: (Bswap64 x) 27818 // result: (REV x) 27819 for { 27820 x := v.Args[0] 27821 v.reset(OpARM64REV) 27822 v.AddArg(x) 27823 return true 27824 } 27825 } 27826 func rewriteValueARM64_OpCeil_0(v *Value) bool { 27827 // match: (Ceil x) 27828 // result: (FRINTPD x) 27829 for { 27830 x := v.Args[0] 27831 v.reset(OpARM64FRINTPD) 27832 v.AddArg(x) 27833 return true 27834 } 27835 } 27836 func rewriteValueARM64_OpClosureCall_0(v *Value) bool { 27837 // match: (ClosureCall [argwid] entry closure mem) 27838 // result: (CALLclosure [argwid] entry closure mem) 27839 for { 27840 argwid := v.AuxInt 27841 mem := v.Args[2] 27842 entry := v.Args[0] 27843 closure := v.Args[1] 27844 v.reset(OpARM64CALLclosure) 27845 v.AuxInt = argwid 27846 v.AddArg(entry) 27847 v.AddArg(closure) 27848 v.AddArg(mem) 27849 return true 27850 } 27851 } 27852 func rewriteValueARM64_OpCom16_0(v *Value) bool { 27853 // match: (Com16 x) 27854 // result: (MVN x) 27855 for { 27856 x := v.Args[0] 27857 v.reset(OpARM64MVN) 27858 v.AddArg(x) 27859 return true 27860 } 27861 
} 27862 func rewriteValueARM64_OpCom32_0(v *Value) bool { 27863 // match: (Com32 x) 27864 // result: (MVN x) 27865 for { 27866 x := v.Args[0] 27867 v.reset(OpARM64MVN) 27868 v.AddArg(x) 27869 return true 27870 } 27871 } 27872 func rewriteValueARM64_OpCom64_0(v *Value) bool { 27873 // match: (Com64 x) 27874 // result: (MVN x) 27875 for { 27876 x := v.Args[0] 27877 v.reset(OpARM64MVN) 27878 v.AddArg(x) 27879 return true 27880 } 27881 } 27882 func rewriteValueARM64_OpCom8_0(v *Value) bool { 27883 // match: (Com8 x) 27884 // result: (MVN x) 27885 for { 27886 x := v.Args[0] 27887 v.reset(OpARM64MVN) 27888 v.AddArg(x) 27889 return true 27890 } 27891 } 27892 func rewriteValueARM64_OpCondSelect_0(v *Value) bool { 27893 b := v.Block 27894 // match: (CondSelect x y boolval) 27895 // cond: flagArg(boolval) != nil 27896 // result: (CSEL {boolval.Op} x y flagArg(boolval)) 27897 for { 27898 boolval := v.Args[2] 27899 x := v.Args[0] 27900 y := v.Args[1] 27901 if !(flagArg(boolval) != nil) { 27902 break 27903 } 27904 v.reset(OpARM64CSEL) 27905 v.Aux = boolval.Op 27906 v.AddArg(x) 27907 v.AddArg(y) 27908 v.AddArg(flagArg(boolval)) 27909 return true 27910 } 27911 // match: (CondSelect x y boolval) 27912 // cond: flagArg(boolval) == nil 27913 // result: (CSEL {OpARM64NotEqual} x y (CMPWconst [0] boolval)) 27914 for { 27915 boolval := v.Args[2] 27916 x := v.Args[0] 27917 y := v.Args[1] 27918 if !(flagArg(boolval) == nil) { 27919 break 27920 } 27921 v.reset(OpARM64CSEL) 27922 v.Aux = OpARM64NotEqual 27923 v.AddArg(x) 27924 v.AddArg(y) 27925 v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags) 27926 v0.AuxInt = 0 27927 v0.AddArg(boolval) 27928 v.AddArg(v0) 27929 return true 27930 } 27931 return false 27932 } 27933 func rewriteValueARM64_OpConst16_0(v *Value) bool { 27934 // match: (Const16 [val]) 27935 // result: (MOVDconst [val]) 27936 for { 27937 val := v.AuxInt 27938 v.reset(OpARM64MOVDconst) 27939 v.AuxInt = val 27940 return true 27941 } 27942 } 27943 func 
rewriteValueARM64_OpConst32_0(v *Value) bool { 27944 // match: (Const32 [val]) 27945 // result: (MOVDconst [val]) 27946 for { 27947 val := v.AuxInt 27948 v.reset(OpARM64MOVDconst) 27949 v.AuxInt = val 27950 return true 27951 } 27952 } 27953 func rewriteValueARM64_OpConst32F_0(v *Value) bool { 27954 // match: (Const32F [val]) 27955 // result: (FMOVSconst [val]) 27956 for { 27957 val := v.AuxInt 27958 v.reset(OpARM64FMOVSconst) 27959 v.AuxInt = val 27960 return true 27961 } 27962 } 27963 func rewriteValueARM64_OpConst64_0(v *Value) bool { 27964 // match: (Const64 [val]) 27965 // result: (MOVDconst [val]) 27966 for { 27967 val := v.AuxInt 27968 v.reset(OpARM64MOVDconst) 27969 v.AuxInt = val 27970 return true 27971 } 27972 } 27973 func rewriteValueARM64_OpConst64F_0(v *Value) bool { 27974 // match: (Const64F [val]) 27975 // result: (FMOVDconst [val]) 27976 for { 27977 val := v.AuxInt 27978 v.reset(OpARM64FMOVDconst) 27979 v.AuxInt = val 27980 return true 27981 } 27982 } 27983 func rewriteValueARM64_OpConst8_0(v *Value) bool { 27984 // match: (Const8 [val]) 27985 // result: (MOVDconst [val]) 27986 for { 27987 val := v.AuxInt 27988 v.reset(OpARM64MOVDconst) 27989 v.AuxInt = val 27990 return true 27991 } 27992 } 27993 func rewriteValueARM64_OpConstBool_0(v *Value) bool { 27994 // match: (ConstBool [b]) 27995 // result: (MOVDconst [b]) 27996 for { 27997 b := v.AuxInt 27998 v.reset(OpARM64MOVDconst) 27999 v.AuxInt = b 28000 return true 28001 } 28002 } 28003 func rewriteValueARM64_OpConstNil_0(v *Value) bool { 28004 // match: (ConstNil) 28005 // result: (MOVDconst [0]) 28006 for { 28007 v.reset(OpARM64MOVDconst) 28008 v.AuxInt = 0 28009 return true 28010 } 28011 } 28012 func rewriteValueARM64_OpCtz16_0(v *Value) bool { 28013 b := v.Block 28014 typ := &b.Func.Config.Types 28015 // match: (Ctz16 <t> x) 28016 // result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x10000] x))) 28017 for { 28018 t := v.Type 28019 x := v.Args[0] 28020 v.reset(OpARM64CLZW) 28021 v.Type = 
t 28022 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32) 28023 v1 := b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32) 28024 v1.AuxInt = 0x10000 28025 v1.AddArg(x) 28026 v0.AddArg(v1) 28027 v.AddArg(v0) 28028 return true 28029 } 28030 } 28031 func rewriteValueARM64_OpCtz16NonZero_0(v *Value) bool { 28032 // match: (Ctz16NonZero x) 28033 // result: (Ctz32 x) 28034 for { 28035 x := v.Args[0] 28036 v.reset(OpCtz32) 28037 v.AddArg(x) 28038 return true 28039 } 28040 } 28041 func rewriteValueARM64_OpCtz32_0(v *Value) bool { 28042 b := v.Block 28043 // match: (Ctz32 <t> x) 28044 // result: (CLZW (RBITW <t> x)) 28045 for { 28046 t := v.Type 28047 x := v.Args[0] 28048 v.reset(OpARM64CLZW) 28049 v0 := b.NewValue0(v.Pos, OpARM64RBITW, t) 28050 v0.AddArg(x) 28051 v.AddArg(v0) 28052 return true 28053 } 28054 } 28055 func rewriteValueARM64_OpCtz32NonZero_0(v *Value) bool { 28056 // match: (Ctz32NonZero x) 28057 // result: (Ctz32 x) 28058 for { 28059 x := v.Args[0] 28060 v.reset(OpCtz32) 28061 v.AddArg(x) 28062 return true 28063 } 28064 } 28065 func rewriteValueARM64_OpCtz64_0(v *Value) bool { 28066 b := v.Block 28067 // match: (Ctz64 <t> x) 28068 // result: (CLZ (RBIT <t> x)) 28069 for { 28070 t := v.Type 28071 x := v.Args[0] 28072 v.reset(OpARM64CLZ) 28073 v0 := b.NewValue0(v.Pos, OpARM64RBIT, t) 28074 v0.AddArg(x) 28075 v.AddArg(v0) 28076 return true 28077 } 28078 } 28079 func rewriteValueARM64_OpCtz64NonZero_0(v *Value) bool { 28080 // match: (Ctz64NonZero x) 28081 // result: (Ctz64 x) 28082 for { 28083 x := v.Args[0] 28084 v.reset(OpCtz64) 28085 v.AddArg(x) 28086 return true 28087 } 28088 } 28089 func rewriteValueARM64_OpCtz8_0(v *Value) bool { 28090 b := v.Block 28091 typ := &b.Func.Config.Types 28092 // match: (Ctz8 <t> x) 28093 // result: (CLZW <t> (RBITW <typ.UInt32> (ORconst <typ.UInt32> [0x100] x))) 28094 for { 28095 t := v.Type 28096 x := v.Args[0] 28097 v.reset(OpARM64CLZW) 28098 v.Type = t 28099 v0 := b.NewValue0(v.Pos, OpARM64RBITW, typ.UInt32) 28100 v1 := 
b.NewValue0(v.Pos, OpARM64ORconst, typ.UInt32) 28101 v1.AuxInt = 0x100 28102 v1.AddArg(x) 28103 v0.AddArg(v1) 28104 v.AddArg(v0) 28105 return true 28106 } 28107 } 28108 func rewriteValueARM64_OpCtz8NonZero_0(v *Value) bool { 28109 // match: (Ctz8NonZero x) 28110 // result: (Ctz32 x) 28111 for { 28112 x := v.Args[0] 28113 v.reset(OpCtz32) 28114 v.AddArg(x) 28115 return true 28116 } 28117 } 28118 func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool { 28119 // match: (Cvt32Fto32 x) 28120 // result: (FCVTZSSW x) 28121 for { 28122 x := v.Args[0] 28123 v.reset(OpARM64FCVTZSSW) 28124 v.AddArg(x) 28125 return true 28126 } 28127 } 28128 func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool { 28129 // match: (Cvt32Fto32U x) 28130 // result: (FCVTZUSW x) 28131 for { 28132 x := v.Args[0] 28133 v.reset(OpARM64FCVTZUSW) 28134 v.AddArg(x) 28135 return true 28136 } 28137 } 28138 func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool { 28139 // match: (Cvt32Fto64 x) 28140 // result: (FCVTZSS x) 28141 for { 28142 x := v.Args[0] 28143 v.reset(OpARM64FCVTZSS) 28144 v.AddArg(x) 28145 return true 28146 } 28147 } 28148 func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool { 28149 // match: (Cvt32Fto64F x) 28150 // result: (FCVTSD x) 28151 for { 28152 x := v.Args[0] 28153 v.reset(OpARM64FCVTSD) 28154 v.AddArg(x) 28155 return true 28156 } 28157 } 28158 func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool { 28159 // match: (Cvt32Fto64U x) 28160 // result: (FCVTZUS x) 28161 for { 28162 x := v.Args[0] 28163 v.reset(OpARM64FCVTZUS) 28164 v.AddArg(x) 28165 return true 28166 } 28167 } 28168 func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool { 28169 // match: (Cvt32Uto32F x) 28170 // result: (UCVTFWS x) 28171 for { 28172 x := v.Args[0] 28173 v.reset(OpARM64UCVTFWS) 28174 v.AddArg(x) 28175 return true 28176 } 28177 } 28178 func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool { 28179 // match: (Cvt32Uto64F x) 28180 // result: (UCVTFWD x) 28181 for { 28182 x := v.Args[0] 28183 
v.reset(OpARM64UCVTFWD) 28184 v.AddArg(x) 28185 return true 28186 } 28187 } 28188 func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool { 28189 // match: (Cvt32to32F x) 28190 // result: (SCVTFWS x) 28191 for { 28192 x := v.Args[0] 28193 v.reset(OpARM64SCVTFWS) 28194 v.AddArg(x) 28195 return true 28196 } 28197 } 28198 func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool { 28199 // match: (Cvt32to64F x) 28200 // result: (SCVTFWD x) 28201 for { 28202 x := v.Args[0] 28203 v.reset(OpARM64SCVTFWD) 28204 v.AddArg(x) 28205 return true 28206 } 28207 } 28208 func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool { 28209 // match: (Cvt64Fto32 x) 28210 // result: (FCVTZSDW x) 28211 for { 28212 x := v.Args[0] 28213 v.reset(OpARM64FCVTZSDW) 28214 v.AddArg(x) 28215 return true 28216 } 28217 } 28218 func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool { 28219 // match: (Cvt64Fto32F x) 28220 // result: (FCVTDS x) 28221 for { 28222 x := v.Args[0] 28223 v.reset(OpARM64FCVTDS) 28224 v.AddArg(x) 28225 return true 28226 } 28227 } 28228 func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool { 28229 // match: (Cvt64Fto32U x) 28230 // result: (FCVTZUDW x) 28231 for { 28232 x := v.Args[0] 28233 v.reset(OpARM64FCVTZUDW) 28234 v.AddArg(x) 28235 return true 28236 } 28237 } 28238 func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool { 28239 // match: (Cvt64Fto64 x) 28240 // result: (FCVTZSD x) 28241 for { 28242 x := v.Args[0] 28243 v.reset(OpARM64FCVTZSD) 28244 v.AddArg(x) 28245 return true 28246 } 28247 } 28248 func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool { 28249 // match: (Cvt64Fto64U x) 28250 // result: (FCVTZUD x) 28251 for { 28252 x := v.Args[0] 28253 v.reset(OpARM64FCVTZUD) 28254 v.AddArg(x) 28255 return true 28256 } 28257 } 28258 func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool { 28259 // match: (Cvt64Uto32F x) 28260 // result: (UCVTFS x) 28261 for { 28262 x := v.Args[0] 28263 v.reset(OpARM64UCVTFS) 28264 v.AddArg(x) 28265 return true 28266 } 28267 } 28268 func 
rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool { 28269 // match: (Cvt64Uto64F x) 28270 // result: (UCVTFD x) 28271 for { 28272 x := v.Args[0] 28273 v.reset(OpARM64UCVTFD) 28274 v.AddArg(x) 28275 return true 28276 } 28277 } 28278 func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool { 28279 // match: (Cvt64to32F x) 28280 // result: (SCVTFS x) 28281 for { 28282 x := v.Args[0] 28283 v.reset(OpARM64SCVTFS) 28284 v.AddArg(x) 28285 return true 28286 } 28287 } 28288 func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool { 28289 // match: (Cvt64to64F x) 28290 // result: (SCVTFD x) 28291 for { 28292 x := v.Args[0] 28293 v.reset(OpARM64SCVTFD) 28294 v.AddArg(x) 28295 return true 28296 } 28297 } 28298 func rewriteValueARM64_OpDiv16_0(v *Value) bool { 28299 b := v.Block 28300 typ := &b.Func.Config.Types 28301 // match: (Div16 x y) 28302 // result: (DIVW (SignExt16to32 x) (SignExt16to32 y)) 28303 for { 28304 y := v.Args[1] 28305 x := v.Args[0] 28306 v.reset(OpARM64DIVW) 28307 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28308 v0.AddArg(x) 28309 v.AddArg(v0) 28310 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28311 v1.AddArg(y) 28312 v.AddArg(v1) 28313 return true 28314 } 28315 } 28316 func rewriteValueARM64_OpDiv16u_0(v *Value) bool { 28317 b := v.Block 28318 typ := &b.Func.Config.Types 28319 // match: (Div16u x y) 28320 // result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y)) 28321 for { 28322 y := v.Args[1] 28323 x := v.Args[0] 28324 v.reset(OpARM64UDIVW) 28325 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28326 v0.AddArg(x) 28327 v.AddArg(v0) 28328 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28329 v1.AddArg(y) 28330 v.AddArg(v1) 28331 return true 28332 } 28333 } 28334 func rewriteValueARM64_OpDiv32_0(v *Value) bool { 28335 // match: (Div32 x y) 28336 // result: (DIVW x y) 28337 for { 28338 y := v.Args[1] 28339 x := v.Args[0] 28340 v.reset(OpARM64DIVW) 28341 v.AddArg(x) 28342 v.AddArg(y) 28343 return true 28344 } 28345 } 28346 func 
rewriteValueARM64_OpDiv32F_0(v *Value) bool { 28347 // match: (Div32F x y) 28348 // result: (FDIVS x y) 28349 for { 28350 y := v.Args[1] 28351 x := v.Args[0] 28352 v.reset(OpARM64FDIVS) 28353 v.AddArg(x) 28354 v.AddArg(y) 28355 return true 28356 } 28357 } 28358 func rewriteValueARM64_OpDiv32u_0(v *Value) bool { 28359 // match: (Div32u x y) 28360 // result: (UDIVW x y) 28361 for { 28362 y := v.Args[1] 28363 x := v.Args[0] 28364 v.reset(OpARM64UDIVW) 28365 v.AddArg(x) 28366 v.AddArg(y) 28367 return true 28368 } 28369 } 28370 func rewriteValueARM64_OpDiv64_0(v *Value) bool { 28371 // match: (Div64 x y) 28372 // result: (DIV x y) 28373 for { 28374 y := v.Args[1] 28375 x := v.Args[0] 28376 v.reset(OpARM64DIV) 28377 v.AddArg(x) 28378 v.AddArg(y) 28379 return true 28380 } 28381 } 28382 func rewriteValueARM64_OpDiv64F_0(v *Value) bool { 28383 // match: (Div64F x y) 28384 // result: (FDIVD x y) 28385 for { 28386 y := v.Args[1] 28387 x := v.Args[0] 28388 v.reset(OpARM64FDIVD) 28389 v.AddArg(x) 28390 v.AddArg(y) 28391 return true 28392 } 28393 } 28394 func rewriteValueARM64_OpDiv64u_0(v *Value) bool { 28395 // match: (Div64u x y) 28396 // result: (UDIV x y) 28397 for { 28398 y := v.Args[1] 28399 x := v.Args[0] 28400 v.reset(OpARM64UDIV) 28401 v.AddArg(x) 28402 v.AddArg(y) 28403 return true 28404 } 28405 } 28406 func rewriteValueARM64_OpDiv8_0(v *Value) bool { 28407 b := v.Block 28408 typ := &b.Func.Config.Types 28409 // match: (Div8 x y) 28410 // result: (DIVW (SignExt8to32 x) (SignExt8to32 y)) 28411 for { 28412 y := v.Args[1] 28413 x := v.Args[0] 28414 v.reset(OpARM64DIVW) 28415 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 28416 v0.AddArg(x) 28417 v.AddArg(v0) 28418 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 28419 v1.AddArg(y) 28420 v.AddArg(v1) 28421 return true 28422 } 28423 } 28424 func rewriteValueARM64_OpDiv8u_0(v *Value) bool { 28425 b := v.Block 28426 typ := &b.Func.Config.Types 28427 // match: (Div8u x y) 28428 // result: (UDIVW (ZeroExt8to32 x) 
(ZeroExt8to32 y)) 28429 for { 28430 y := v.Args[1] 28431 x := v.Args[0] 28432 v.reset(OpARM64UDIVW) 28433 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28434 v0.AddArg(x) 28435 v.AddArg(v0) 28436 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28437 v1.AddArg(y) 28438 v.AddArg(v1) 28439 return true 28440 } 28441 } 28442 func rewriteValueARM64_OpEq16_0(v *Value) bool { 28443 b := v.Block 28444 typ := &b.Func.Config.Types 28445 // match: (Eq16 x y) 28446 // result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 28447 for { 28448 y := v.Args[1] 28449 x := v.Args[0] 28450 v.reset(OpARM64Equal) 28451 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28452 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28453 v1.AddArg(x) 28454 v0.AddArg(v1) 28455 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28456 v2.AddArg(y) 28457 v0.AddArg(v2) 28458 v.AddArg(v0) 28459 return true 28460 } 28461 } 28462 func rewriteValueARM64_OpEq32_0(v *Value) bool { 28463 b := v.Block 28464 // match: (Eq32 x y) 28465 // result: (Equal (CMPW x y)) 28466 for { 28467 y := v.Args[1] 28468 x := v.Args[0] 28469 v.reset(OpARM64Equal) 28470 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28471 v0.AddArg(x) 28472 v0.AddArg(y) 28473 v.AddArg(v0) 28474 return true 28475 } 28476 } 28477 func rewriteValueARM64_OpEq32F_0(v *Value) bool { 28478 b := v.Block 28479 // match: (Eq32F x y) 28480 // result: (Equal (FCMPS x y)) 28481 for { 28482 y := v.Args[1] 28483 x := v.Args[0] 28484 v.reset(OpARM64Equal) 28485 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 28486 v0.AddArg(x) 28487 v0.AddArg(y) 28488 v.AddArg(v0) 28489 return true 28490 } 28491 } 28492 func rewriteValueARM64_OpEq64_0(v *Value) bool { 28493 b := v.Block 28494 // match: (Eq64 x y) 28495 // result: (Equal (CMP x y)) 28496 for { 28497 y := v.Args[1] 28498 x := v.Args[0] 28499 v.reset(OpARM64Equal) 28500 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28501 v0.AddArg(x) 28502 v0.AddArg(y) 28503 
v.AddArg(v0) 28504 return true 28505 } 28506 } 28507 func rewriteValueARM64_OpEq64F_0(v *Value) bool { 28508 b := v.Block 28509 // match: (Eq64F x y) 28510 // result: (Equal (FCMPD x y)) 28511 for { 28512 y := v.Args[1] 28513 x := v.Args[0] 28514 v.reset(OpARM64Equal) 28515 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 28516 v0.AddArg(x) 28517 v0.AddArg(y) 28518 v.AddArg(v0) 28519 return true 28520 } 28521 } 28522 func rewriteValueARM64_OpEq8_0(v *Value) bool { 28523 b := v.Block 28524 typ := &b.Func.Config.Types 28525 // match: (Eq8 x y) 28526 // result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 28527 for { 28528 y := v.Args[1] 28529 x := v.Args[0] 28530 v.reset(OpARM64Equal) 28531 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28532 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28533 v1.AddArg(x) 28534 v0.AddArg(v1) 28535 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28536 v2.AddArg(y) 28537 v0.AddArg(v2) 28538 v.AddArg(v0) 28539 return true 28540 } 28541 } 28542 func rewriteValueARM64_OpEqB_0(v *Value) bool { 28543 b := v.Block 28544 typ := &b.Func.Config.Types 28545 // match: (EqB x y) 28546 // result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y)) 28547 for { 28548 y := v.Args[1] 28549 x := v.Args[0] 28550 v.reset(OpARM64XOR) 28551 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 28552 v0.AuxInt = 1 28553 v.AddArg(v0) 28554 v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool) 28555 v1.AddArg(x) 28556 v1.AddArg(y) 28557 v.AddArg(v1) 28558 return true 28559 } 28560 } 28561 func rewriteValueARM64_OpEqPtr_0(v *Value) bool { 28562 b := v.Block 28563 // match: (EqPtr x y) 28564 // result: (Equal (CMP x y)) 28565 for { 28566 y := v.Args[1] 28567 x := v.Args[0] 28568 v.reset(OpARM64Equal) 28569 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28570 v0.AddArg(x) 28571 v0.AddArg(y) 28572 v.AddArg(v0) 28573 return true 28574 } 28575 } 28576 func rewriteValueARM64_OpFMA_0(v *Value) bool { 28577 // match: (FMA x y z) 28578 // result: 
(FMADDD z x y) 28579 for { 28580 z := v.Args[2] 28581 x := v.Args[0] 28582 y := v.Args[1] 28583 v.reset(OpARM64FMADDD) 28584 v.AddArg(z) 28585 v.AddArg(x) 28586 v.AddArg(y) 28587 return true 28588 } 28589 } 28590 func rewriteValueARM64_OpFloor_0(v *Value) bool { 28591 // match: (Floor x) 28592 // result: (FRINTMD x) 28593 for { 28594 x := v.Args[0] 28595 v.reset(OpARM64FRINTMD) 28596 v.AddArg(x) 28597 return true 28598 } 28599 } 28600 func rewriteValueARM64_OpGeq16_0(v *Value) bool { 28601 b := v.Block 28602 typ := &b.Func.Config.Types 28603 // match: (Geq16 x y) 28604 // result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 28605 for { 28606 y := v.Args[1] 28607 x := v.Args[0] 28608 v.reset(OpARM64GreaterEqual) 28609 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28610 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28611 v1.AddArg(x) 28612 v0.AddArg(v1) 28613 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28614 v2.AddArg(y) 28615 v0.AddArg(v2) 28616 v.AddArg(v0) 28617 return true 28618 } 28619 } 28620 func rewriteValueARM64_OpGeq16U_0(v *Value) bool { 28621 b := v.Block 28622 typ := &b.Func.Config.Types 28623 // match: (Geq16U x y) 28624 // result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 28625 for { 28626 y := v.Args[1] 28627 x := v.Args[0] 28628 v.reset(OpARM64GreaterEqualU) 28629 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28630 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28631 v1.AddArg(x) 28632 v0.AddArg(v1) 28633 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28634 v2.AddArg(y) 28635 v0.AddArg(v2) 28636 v.AddArg(v0) 28637 return true 28638 } 28639 } 28640 func rewriteValueARM64_OpGeq32_0(v *Value) bool { 28641 b := v.Block 28642 // match: (Geq32 x y) 28643 // result: (GreaterEqual (CMPW x y)) 28644 for { 28645 y := v.Args[1] 28646 x := v.Args[0] 28647 v.reset(OpARM64GreaterEqual) 28648 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28649 v0.AddArg(x) 28650 v0.AddArg(y) 
28651 v.AddArg(v0) 28652 return true 28653 } 28654 } 28655 func rewriteValueARM64_OpGeq32F_0(v *Value) bool { 28656 b := v.Block 28657 // match: (Geq32F x y) 28658 // result: (GreaterEqualF (FCMPS x y)) 28659 for { 28660 y := v.Args[1] 28661 x := v.Args[0] 28662 v.reset(OpARM64GreaterEqualF) 28663 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 28664 v0.AddArg(x) 28665 v0.AddArg(y) 28666 v.AddArg(v0) 28667 return true 28668 } 28669 } 28670 func rewriteValueARM64_OpGeq32U_0(v *Value) bool { 28671 b := v.Block 28672 // match: (Geq32U x y) 28673 // result: (GreaterEqualU (CMPW x y)) 28674 for { 28675 y := v.Args[1] 28676 x := v.Args[0] 28677 v.reset(OpARM64GreaterEqualU) 28678 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28679 v0.AddArg(x) 28680 v0.AddArg(y) 28681 v.AddArg(v0) 28682 return true 28683 } 28684 } 28685 func rewriteValueARM64_OpGeq64_0(v *Value) bool { 28686 b := v.Block 28687 // match: (Geq64 x y) 28688 // result: (GreaterEqual (CMP x y)) 28689 for { 28690 y := v.Args[1] 28691 x := v.Args[0] 28692 v.reset(OpARM64GreaterEqual) 28693 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28694 v0.AddArg(x) 28695 v0.AddArg(y) 28696 v.AddArg(v0) 28697 return true 28698 } 28699 } 28700 func rewriteValueARM64_OpGeq64F_0(v *Value) bool { 28701 b := v.Block 28702 // match: (Geq64F x y) 28703 // result: (GreaterEqualF (FCMPD x y)) 28704 for { 28705 y := v.Args[1] 28706 x := v.Args[0] 28707 v.reset(OpARM64GreaterEqualF) 28708 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 28709 v0.AddArg(x) 28710 v0.AddArg(y) 28711 v.AddArg(v0) 28712 return true 28713 } 28714 } 28715 func rewriteValueARM64_OpGeq64U_0(v *Value) bool { 28716 b := v.Block 28717 // match: (Geq64U x y) 28718 // result: (GreaterEqualU (CMP x y)) 28719 for { 28720 y := v.Args[1] 28721 x := v.Args[0] 28722 v.reset(OpARM64GreaterEqualU) 28723 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28724 v0.AddArg(x) 28725 v0.AddArg(y) 28726 v.AddArg(v0) 28727 return true 28728 } 
28729 } 28730 func rewriteValueARM64_OpGeq8_0(v *Value) bool { 28731 b := v.Block 28732 typ := &b.Func.Config.Types 28733 // match: (Geq8 x y) 28734 // result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 28735 for { 28736 y := v.Args[1] 28737 x := v.Args[0] 28738 v.reset(OpARM64GreaterEqual) 28739 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28740 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 28741 v1.AddArg(x) 28742 v0.AddArg(v1) 28743 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 28744 v2.AddArg(y) 28745 v0.AddArg(v2) 28746 v.AddArg(v0) 28747 return true 28748 } 28749 } 28750 func rewriteValueARM64_OpGeq8U_0(v *Value) bool { 28751 b := v.Block 28752 typ := &b.Func.Config.Types 28753 // match: (Geq8U x y) 28754 // result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 28755 for { 28756 y := v.Args[1] 28757 x := v.Args[0] 28758 v.reset(OpARM64GreaterEqualU) 28759 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28760 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28761 v1.AddArg(x) 28762 v0.AddArg(v1) 28763 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28764 v2.AddArg(y) 28765 v0.AddArg(v2) 28766 v.AddArg(v0) 28767 return true 28768 } 28769 } 28770 func rewriteValueARM64_OpGetCallerPC_0(v *Value) bool { 28771 // match: (GetCallerPC) 28772 // result: (LoweredGetCallerPC) 28773 for { 28774 v.reset(OpARM64LoweredGetCallerPC) 28775 return true 28776 } 28777 } 28778 func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool { 28779 // match: (GetCallerSP) 28780 // result: (LoweredGetCallerSP) 28781 for { 28782 v.reset(OpARM64LoweredGetCallerSP) 28783 return true 28784 } 28785 } 28786 func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool { 28787 // match: (GetClosurePtr) 28788 // result: (LoweredGetClosurePtr) 28789 for { 28790 v.reset(OpARM64LoweredGetClosurePtr) 28791 return true 28792 } 28793 } 28794 func rewriteValueARM64_OpGreater16_0(v *Value) bool { 28795 b := v.Block 28796 typ := &b.Func.Config.Types 
28797 // match: (Greater16 x y) 28798 // result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 28799 for { 28800 y := v.Args[1] 28801 x := v.Args[0] 28802 v.reset(OpARM64GreaterThan) 28803 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28804 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28805 v1.AddArg(x) 28806 v0.AddArg(v1) 28807 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 28808 v2.AddArg(y) 28809 v0.AddArg(v2) 28810 v.AddArg(v0) 28811 return true 28812 } 28813 } 28814 func rewriteValueARM64_OpGreater16U_0(v *Value) bool { 28815 b := v.Block 28816 typ := &b.Func.Config.Types 28817 // match: (Greater16U x y) 28818 // result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 28819 for { 28820 y := v.Args[1] 28821 x := v.Args[0] 28822 v.reset(OpARM64GreaterThanU) 28823 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28824 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28825 v1.AddArg(x) 28826 v0.AddArg(v1) 28827 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 28828 v2.AddArg(y) 28829 v0.AddArg(v2) 28830 v.AddArg(v0) 28831 return true 28832 } 28833 } 28834 func rewriteValueARM64_OpGreater32_0(v *Value) bool { 28835 b := v.Block 28836 // match: (Greater32 x y) 28837 // result: (GreaterThan (CMPW x y)) 28838 for { 28839 y := v.Args[1] 28840 x := v.Args[0] 28841 v.reset(OpARM64GreaterThan) 28842 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28843 v0.AddArg(x) 28844 v0.AddArg(y) 28845 v.AddArg(v0) 28846 return true 28847 } 28848 } 28849 func rewriteValueARM64_OpGreater32F_0(v *Value) bool { 28850 b := v.Block 28851 // match: (Greater32F x y) 28852 // result: (GreaterThanF (FCMPS x y)) 28853 for { 28854 y := v.Args[1] 28855 x := v.Args[0] 28856 v.reset(OpARM64GreaterThanF) 28857 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 28858 v0.AddArg(x) 28859 v0.AddArg(y) 28860 v.AddArg(v0) 28861 return true 28862 } 28863 } 28864 func rewriteValueARM64_OpGreater32U_0(v *Value) bool { 28865 b := v.Block 
28866 // match: (Greater32U x y) 28867 // result: (GreaterThanU (CMPW x y)) 28868 for { 28869 y := v.Args[1] 28870 x := v.Args[0] 28871 v.reset(OpARM64GreaterThanU) 28872 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28873 v0.AddArg(x) 28874 v0.AddArg(y) 28875 v.AddArg(v0) 28876 return true 28877 } 28878 } 28879 func rewriteValueARM64_OpGreater64_0(v *Value) bool { 28880 b := v.Block 28881 // match: (Greater64 x y) 28882 // result: (GreaterThan (CMP x y)) 28883 for { 28884 y := v.Args[1] 28885 x := v.Args[0] 28886 v.reset(OpARM64GreaterThan) 28887 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28888 v0.AddArg(x) 28889 v0.AddArg(y) 28890 v.AddArg(v0) 28891 return true 28892 } 28893 } 28894 func rewriteValueARM64_OpGreater64F_0(v *Value) bool { 28895 b := v.Block 28896 // match: (Greater64F x y) 28897 // result: (GreaterThanF (FCMPD x y)) 28898 for { 28899 y := v.Args[1] 28900 x := v.Args[0] 28901 v.reset(OpARM64GreaterThanF) 28902 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 28903 v0.AddArg(x) 28904 v0.AddArg(y) 28905 v.AddArg(v0) 28906 return true 28907 } 28908 } 28909 func rewriteValueARM64_OpGreater64U_0(v *Value) bool { 28910 b := v.Block 28911 // match: (Greater64U x y) 28912 // result: (GreaterThanU (CMP x y)) 28913 for { 28914 y := v.Args[1] 28915 x := v.Args[0] 28916 v.reset(OpARM64GreaterThanU) 28917 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 28918 v0.AddArg(x) 28919 v0.AddArg(y) 28920 v.AddArg(v0) 28921 return true 28922 } 28923 } 28924 func rewriteValueARM64_OpGreater8_0(v *Value) bool { 28925 b := v.Block 28926 typ := &b.Func.Config.Types 28927 // match: (Greater8 x y) 28928 // result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 28929 for { 28930 y := v.Args[1] 28931 x := v.Args[0] 28932 v.reset(OpARM64GreaterThan) 28933 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28934 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 28935 v1.AddArg(x) 28936 v0.AddArg(v1) 28937 v2 := b.NewValue0(v.Pos, 
OpSignExt8to32, typ.Int32) 28938 v2.AddArg(y) 28939 v0.AddArg(v2) 28940 v.AddArg(v0) 28941 return true 28942 } 28943 } 28944 func rewriteValueARM64_OpGreater8U_0(v *Value) bool { 28945 b := v.Block 28946 typ := &b.Func.Config.Types 28947 // match: (Greater8U x y) 28948 // result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 28949 for { 28950 y := v.Args[1] 28951 x := v.Args[0] 28952 v.reset(OpARM64GreaterThanU) 28953 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 28954 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28955 v1.AddArg(x) 28956 v0.AddArg(v1) 28957 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 28958 v2.AddArg(y) 28959 v0.AddArg(v2) 28960 v.AddArg(v0) 28961 return true 28962 } 28963 } 28964 func rewriteValueARM64_OpHmul32_0(v *Value) bool { 28965 b := v.Block 28966 typ := &b.Func.Config.Types 28967 // match: (Hmul32 x y) 28968 // result: (SRAconst (MULL <typ.Int64> x y) [32]) 28969 for { 28970 y := v.Args[1] 28971 x := v.Args[0] 28972 v.reset(OpARM64SRAconst) 28973 v.AuxInt = 32 28974 v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64) 28975 v0.AddArg(x) 28976 v0.AddArg(y) 28977 v.AddArg(v0) 28978 return true 28979 } 28980 } 28981 func rewriteValueARM64_OpHmul32u_0(v *Value) bool { 28982 b := v.Block 28983 typ := &b.Func.Config.Types 28984 // match: (Hmul32u x y) 28985 // result: (SRAconst (UMULL <typ.UInt64> x y) [32]) 28986 for { 28987 y := v.Args[1] 28988 x := v.Args[0] 28989 v.reset(OpARM64SRAconst) 28990 v.AuxInt = 32 28991 v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64) 28992 v0.AddArg(x) 28993 v0.AddArg(y) 28994 v.AddArg(v0) 28995 return true 28996 } 28997 } 28998 func rewriteValueARM64_OpHmul64_0(v *Value) bool { 28999 // match: (Hmul64 x y) 29000 // result: (MULH x y) 29001 for { 29002 y := v.Args[1] 29003 x := v.Args[0] 29004 v.reset(OpARM64MULH) 29005 v.AddArg(x) 29006 v.AddArg(y) 29007 return true 29008 } 29009 } 29010 func rewriteValueARM64_OpHmul64u_0(v *Value) bool { 29011 // match: (Hmul64u x y) 29012 // 
result: (UMULH x y) 29013 for { 29014 y := v.Args[1] 29015 x := v.Args[0] 29016 v.reset(OpARM64UMULH) 29017 v.AddArg(x) 29018 v.AddArg(y) 29019 return true 29020 } 29021 } 29022 func rewriteValueARM64_OpInterCall_0(v *Value) bool { 29023 // match: (InterCall [argwid] entry mem) 29024 // result: (CALLinter [argwid] entry mem) 29025 for { 29026 argwid := v.AuxInt 29027 mem := v.Args[1] 29028 entry := v.Args[0] 29029 v.reset(OpARM64CALLinter) 29030 v.AuxInt = argwid 29031 v.AddArg(entry) 29032 v.AddArg(mem) 29033 return true 29034 } 29035 } 29036 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool { 29037 b := v.Block 29038 // match: (IsInBounds idx len) 29039 // result: (LessThanU (CMP idx len)) 29040 for { 29041 len := v.Args[1] 29042 idx := v.Args[0] 29043 v.reset(OpARM64LessThanU) 29044 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29045 v0.AddArg(idx) 29046 v0.AddArg(len) 29047 v.AddArg(v0) 29048 return true 29049 } 29050 } 29051 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool { 29052 b := v.Block 29053 // match: (IsNonNil ptr) 29054 // result: (NotEqual (CMPconst [0] ptr)) 29055 for { 29056 ptr := v.Args[0] 29057 v.reset(OpARM64NotEqual) 29058 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29059 v0.AuxInt = 0 29060 v0.AddArg(ptr) 29061 v.AddArg(v0) 29062 return true 29063 } 29064 } 29065 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool { 29066 b := v.Block 29067 // match: (IsSliceInBounds idx len) 29068 // result: (LessEqualU (CMP idx len)) 29069 for { 29070 len := v.Args[1] 29071 idx := v.Args[0] 29072 v.reset(OpARM64LessEqualU) 29073 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29074 v0.AddArg(idx) 29075 v0.AddArg(len) 29076 v.AddArg(v0) 29077 return true 29078 } 29079 } 29080 func rewriteValueARM64_OpLeq16_0(v *Value) bool { 29081 b := v.Block 29082 typ := &b.Func.Config.Types 29083 // match: (Leq16 x y) 29084 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 29085 for { 29086 y := v.Args[1] 29087 x := 
v.Args[0] 29088 v.reset(OpARM64LessEqual) 29089 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29090 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 29091 v1.AddArg(x) 29092 v0.AddArg(v1) 29093 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 29094 v2.AddArg(y) 29095 v0.AddArg(v2) 29096 v.AddArg(v0) 29097 return true 29098 } 29099 } 29100 func rewriteValueARM64_OpLeq16U_0(v *Value) bool { 29101 b := v.Block 29102 typ := &b.Func.Config.Types 29103 // match: (Leq16U x y) 29104 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 29105 for { 29106 y := v.Args[1] 29107 x := v.Args[0] 29108 v.reset(OpARM64LessEqualU) 29109 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29110 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 29111 v1.AddArg(x) 29112 v0.AddArg(v1) 29113 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 29114 v2.AddArg(y) 29115 v0.AddArg(v2) 29116 v.AddArg(v0) 29117 return true 29118 } 29119 } 29120 func rewriteValueARM64_OpLeq32_0(v *Value) bool { 29121 b := v.Block 29122 // match: (Leq32 x y) 29123 // result: (LessEqual (CMPW x y)) 29124 for { 29125 y := v.Args[1] 29126 x := v.Args[0] 29127 v.reset(OpARM64LessEqual) 29128 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29129 v0.AddArg(x) 29130 v0.AddArg(y) 29131 v.AddArg(v0) 29132 return true 29133 } 29134 } 29135 func rewriteValueARM64_OpLeq32F_0(v *Value) bool { 29136 b := v.Block 29137 // match: (Leq32F x y) 29138 // result: (LessEqualF (FCMPS x y)) 29139 for { 29140 y := v.Args[1] 29141 x := v.Args[0] 29142 v.reset(OpARM64LessEqualF) 29143 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 29144 v0.AddArg(x) 29145 v0.AddArg(y) 29146 v.AddArg(v0) 29147 return true 29148 } 29149 } 29150 func rewriteValueARM64_OpLeq32U_0(v *Value) bool { 29151 b := v.Block 29152 // match: (Leq32U x y) 29153 // result: (LessEqualU (CMPW x y)) 29154 for { 29155 y := v.Args[1] 29156 x := v.Args[0] 29157 v.reset(OpARM64LessEqualU) 29158 v0 := b.NewValue0(v.Pos, 
OpARM64CMPW, types.TypeFlags) 29159 v0.AddArg(x) 29160 v0.AddArg(y) 29161 v.AddArg(v0) 29162 return true 29163 } 29164 } 29165 func rewriteValueARM64_OpLeq64_0(v *Value) bool { 29166 b := v.Block 29167 // match: (Leq64 x y) 29168 // result: (LessEqual (CMP x y)) 29169 for { 29170 y := v.Args[1] 29171 x := v.Args[0] 29172 v.reset(OpARM64LessEqual) 29173 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29174 v0.AddArg(x) 29175 v0.AddArg(y) 29176 v.AddArg(v0) 29177 return true 29178 } 29179 } 29180 func rewriteValueARM64_OpLeq64F_0(v *Value) bool { 29181 b := v.Block 29182 // match: (Leq64F x y) 29183 // result: (LessEqualF (FCMPD x y)) 29184 for { 29185 y := v.Args[1] 29186 x := v.Args[0] 29187 v.reset(OpARM64LessEqualF) 29188 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 29189 v0.AddArg(x) 29190 v0.AddArg(y) 29191 v.AddArg(v0) 29192 return true 29193 } 29194 } 29195 func rewriteValueARM64_OpLeq64U_0(v *Value) bool { 29196 b := v.Block 29197 // match: (Leq64U x y) 29198 // result: (LessEqualU (CMP x y)) 29199 for { 29200 y := v.Args[1] 29201 x := v.Args[0] 29202 v.reset(OpARM64LessEqualU) 29203 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29204 v0.AddArg(x) 29205 v0.AddArg(y) 29206 v.AddArg(v0) 29207 return true 29208 } 29209 } 29210 func rewriteValueARM64_OpLeq8_0(v *Value) bool { 29211 b := v.Block 29212 typ := &b.Func.Config.Types 29213 // match: (Leq8 x y) 29214 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 29215 for { 29216 y := v.Args[1] 29217 x := v.Args[0] 29218 v.reset(OpARM64LessEqual) 29219 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29220 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 29221 v1.AddArg(x) 29222 v0.AddArg(v1) 29223 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 29224 v2.AddArg(y) 29225 v0.AddArg(v2) 29226 v.AddArg(v0) 29227 return true 29228 } 29229 } 29230 func rewriteValueARM64_OpLeq8U_0(v *Value) bool { 29231 b := v.Block 29232 typ := &b.Func.Config.Types 29233 // match: 
(Leq8U x y) 29234 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 29235 for { 29236 y := v.Args[1] 29237 x := v.Args[0] 29238 v.reset(OpARM64LessEqualU) 29239 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29240 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 29241 v1.AddArg(x) 29242 v0.AddArg(v1) 29243 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 29244 v2.AddArg(y) 29245 v0.AddArg(v2) 29246 v.AddArg(v0) 29247 return true 29248 } 29249 } 29250 func rewriteValueARM64_OpLess16_0(v *Value) bool { 29251 b := v.Block 29252 typ := &b.Func.Config.Types 29253 // match: (Less16 x y) 29254 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 29255 for { 29256 y := v.Args[1] 29257 x := v.Args[0] 29258 v.reset(OpARM64LessThan) 29259 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29260 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 29261 v1.AddArg(x) 29262 v0.AddArg(v1) 29263 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 29264 v2.AddArg(y) 29265 v0.AddArg(v2) 29266 v.AddArg(v0) 29267 return true 29268 } 29269 } 29270 func rewriteValueARM64_OpLess16U_0(v *Value) bool { 29271 b := v.Block 29272 typ := &b.Func.Config.Types 29273 // match: (Less16U x y) 29274 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 29275 for { 29276 y := v.Args[1] 29277 x := v.Args[0] 29278 v.reset(OpARM64LessThanU) 29279 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29280 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 29281 v1.AddArg(x) 29282 v0.AddArg(v1) 29283 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 29284 v2.AddArg(y) 29285 v0.AddArg(v2) 29286 v.AddArg(v0) 29287 return true 29288 } 29289 } 29290 func rewriteValueARM64_OpLess32_0(v *Value) bool { 29291 b := v.Block 29292 // match: (Less32 x y) 29293 // result: (LessThan (CMPW x y)) 29294 for { 29295 y := v.Args[1] 29296 x := v.Args[0] 29297 v.reset(OpARM64LessThan) 29298 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29299 
v0.AddArg(x) 29300 v0.AddArg(y) 29301 v.AddArg(v0) 29302 return true 29303 } 29304 } 29305 func rewriteValueARM64_OpLess32F_0(v *Value) bool { 29306 b := v.Block 29307 // match: (Less32F x y) 29308 // result: (LessThanF (FCMPS x y)) 29309 for { 29310 y := v.Args[1] 29311 x := v.Args[0] 29312 v.reset(OpARM64LessThanF) 29313 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 29314 v0.AddArg(x) 29315 v0.AddArg(y) 29316 v.AddArg(v0) 29317 return true 29318 } 29319 } 29320 func rewriteValueARM64_OpLess32U_0(v *Value) bool { 29321 b := v.Block 29322 // match: (Less32U x y) 29323 // result: (LessThanU (CMPW x y)) 29324 for { 29325 y := v.Args[1] 29326 x := v.Args[0] 29327 v.reset(OpARM64LessThanU) 29328 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29329 v0.AddArg(x) 29330 v0.AddArg(y) 29331 v.AddArg(v0) 29332 return true 29333 } 29334 } 29335 func rewriteValueARM64_OpLess64_0(v *Value) bool { 29336 b := v.Block 29337 // match: (Less64 x y) 29338 // result: (LessThan (CMP x y)) 29339 for { 29340 y := v.Args[1] 29341 x := v.Args[0] 29342 v.reset(OpARM64LessThan) 29343 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29344 v0.AddArg(x) 29345 v0.AddArg(y) 29346 v.AddArg(v0) 29347 return true 29348 } 29349 } 29350 func rewriteValueARM64_OpLess64F_0(v *Value) bool { 29351 b := v.Block 29352 // match: (Less64F x y) 29353 // result: (LessThanF (FCMPD x y)) 29354 for { 29355 y := v.Args[1] 29356 x := v.Args[0] 29357 v.reset(OpARM64LessThanF) 29358 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 29359 v0.AddArg(x) 29360 v0.AddArg(y) 29361 v.AddArg(v0) 29362 return true 29363 } 29364 } 29365 func rewriteValueARM64_OpLess64U_0(v *Value) bool { 29366 b := v.Block 29367 // match: (Less64U x y) 29368 // result: (LessThanU (CMP x y)) 29369 for { 29370 y := v.Args[1] 29371 x := v.Args[0] 29372 v.reset(OpARM64LessThanU) 29373 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 29374 v0.AddArg(x) 29375 v0.AddArg(y) 29376 v.AddArg(v0) 29377 return true 29378 } 
29379 } 29380 func rewriteValueARM64_OpLess8_0(v *Value) bool { 29381 b := v.Block 29382 typ := &b.Func.Config.Types 29383 // match: (Less8 x y) 29384 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 29385 for { 29386 y := v.Args[1] 29387 x := v.Args[0] 29388 v.reset(OpARM64LessThan) 29389 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29390 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 29391 v1.AddArg(x) 29392 v0.AddArg(v1) 29393 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 29394 v2.AddArg(y) 29395 v0.AddArg(v2) 29396 v.AddArg(v0) 29397 return true 29398 } 29399 } 29400 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 29401 b := v.Block 29402 typ := &b.Func.Config.Types 29403 // match: (Less8U x y) 29404 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 29405 for { 29406 y := v.Args[1] 29407 x := v.Args[0] 29408 v.reset(OpARM64LessThanU) 29409 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 29410 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 29411 v1.AddArg(x) 29412 v0.AddArg(v1) 29413 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 29414 v2.AddArg(y) 29415 v0.AddArg(v2) 29416 v.AddArg(v0) 29417 return true 29418 } 29419 } 29420 func rewriteValueARM64_OpLoad_0(v *Value) bool { 29421 // match: (Load <t> ptr mem) 29422 // cond: t.IsBoolean() 29423 // result: (MOVBUload ptr mem) 29424 for { 29425 t := v.Type 29426 mem := v.Args[1] 29427 ptr := v.Args[0] 29428 if !(t.IsBoolean()) { 29429 break 29430 } 29431 v.reset(OpARM64MOVBUload) 29432 v.AddArg(ptr) 29433 v.AddArg(mem) 29434 return true 29435 } 29436 // match: (Load <t> ptr mem) 29437 // cond: (is8BitInt(t) && isSigned(t)) 29438 // result: (MOVBload ptr mem) 29439 for { 29440 t := v.Type 29441 mem := v.Args[1] 29442 ptr := v.Args[0] 29443 if !(is8BitInt(t) && isSigned(t)) { 29444 break 29445 } 29446 v.reset(OpARM64MOVBload) 29447 v.AddArg(ptr) 29448 v.AddArg(mem) 29449 return true 29450 } 29451 // match: (Load <t> ptr mem) 29452 // cond: 
(is8BitInt(t) && !isSigned(t)) 29453 // result: (MOVBUload ptr mem) 29454 for { 29455 t := v.Type 29456 mem := v.Args[1] 29457 ptr := v.Args[0] 29458 if !(is8BitInt(t) && !isSigned(t)) { 29459 break 29460 } 29461 v.reset(OpARM64MOVBUload) 29462 v.AddArg(ptr) 29463 v.AddArg(mem) 29464 return true 29465 } 29466 // match: (Load <t> ptr mem) 29467 // cond: (is16BitInt(t) && isSigned(t)) 29468 // result: (MOVHload ptr mem) 29469 for { 29470 t := v.Type 29471 mem := v.Args[1] 29472 ptr := v.Args[0] 29473 if !(is16BitInt(t) && isSigned(t)) { 29474 break 29475 } 29476 v.reset(OpARM64MOVHload) 29477 v.AddArg(ptr) 29478 v.AddArg(mem) 29479 return true 29480 } 29481 // match: (Load <t> ptr mem) 29482 // cond: (is16BitInt(t) && !isSigned(t)) 29483 // result: (MOVHUload ptr mem) 29484 for { 29485 t := v.Type 29486 mem := v.Args[1] 29487 ptr := v.Args[0] 29488 if !(is16BitInt(t) && !isSigned(t)) { 29489 break 29490 } 29491 v.reset(OpARM64MOVHUload) 29492 v.AddArg(ptr) 29493 v.AddArg(mem) 29494 return true 29495 } 29496 // match: (Load <t> ptr mem) 29497 // cond: (is32BitInt(t) && isSigned(t)) 29498 // result: (MOVWload ptr mem) 29499 for { 29500 t := v.Type 29501 mem := v.Args[1] 29502 ptr := v.Args[0] 29503 if !(is32BitInt(t) && isSigned(t)) { 29504 break 29505 } 29506 v.reset(OpARM64MOVWload) 29507 v.AddArg(ptr) 29508 v.AddArg(mem) 29509 return true 29510 } 29511 // match: (Load <t> ptr mem) 29512 // cond: (is32BitInt(t) && !isSigned(t)) 29513 // result: (MOVWUload ptr mem) 29514 for { 29515 t := v.Type 29516 mem := v.Args[1] 29517 ptr := v.Args[0] 29518 if !(is32BitInt(t) && !isSigned(t)) { 29519 break 29520 } 29521 v.reset(OpARM64MOVWUload) 29522 v.AddArg(ptr) 29523 v.AddArg(mem) 29524 return true 29525 } 29526 // match: (Load <t> ptr mem) 29527 // cond: (is64BitInt(t) || isPtr(t)) 29528 // result: (MOVDload ptr mem) 29529 for { 29530 t := v.Type 29531 mem := v.Args[1] 29532 ptr := v.Args[0] 29533 if !(is64BitInt(t) || isPtr(t)) { 29534 break 29535 } 29536 
v.reset(OpARM64MOVDload) 29537 v.AddArg(ptr) 29538 v.AddArg(mem) 29539 return true 29540 } 29541 // match: (Load <t> ptr mem) 29542 // cond: is32BitFloat(t) 29543 // result: (FMOVSload ptr mem) 29544 for { 29545 t := v.Type 29546 mem := v.Args[1] 29547 ptr := v.Args[0] 29548 if !(is32BitFloat(t)) { 29549 break 29550 } 29551 v.reset(OpARM64FMOVSload) 29552 v.AddArg(ptr) 29553 v.AddArg(mem) 29554 return true 29555 } 29556 // match: (Load <t> ptr mem) 29557 // cond: is64BitFloat(t) 29558 // result: (FMOVDload ptr mem) 29559 for { 29560 t := v.Type 29561 mem := v.Args[1] 29562 ptr := v.Args[0] 29563 if !(is64BitFloat(t)) { 29564 break 29565 } 29566 v.reset(OpARM64FMOVDload) 29567 v.AddArg(ptr) 29568 v.AddArg(mem) 29569 return true 29570 } 29571 return false 29572 } 29573 func rewriteValueARM64_OpLocalAddr_0(v *Value) bool { 29574 // match: (LocalAddr {sym} base _) 29575 // result: (MOVDaddr {sym} base) 29576 for { 29577 sym := v.Aux 29578 _ = v.Args[1] 29579 base := v.Args[0] 29580 v.reset(OpARM64MOVDaddr) 29581 v.Aux = sym 29582 v.AddArg(base) 29583 return true 29584 } 29585 } 29586 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 29587 b := v.Block 29588 typ := &b.Func.Config.Types 29589 // match: (Lsh16x16 <t> x y) 29590 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 29591 for { 29592 t := v.Type 29593 y := v.Args[1] 29594 x := v.Args[0] 29595 v.reset(OpARM64CSEL) 29596 v.Aux = OpARM64LessThanU 29597 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29598 v0.AddArg(x) 29599 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29600 v1.AddArg(y) 29601 v0.AddArg(v1) 29602 v.AddArg(v0) 29603 v2 := b.NewValue0(v.Pos, OpConst64, t) 29604 v2.AuxInt = 0 29605 v.AddArg(v2) 29606 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29607 v3.AuxInt = 64 29608 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29609 v4.AddArg(y) 29610 v3.AddArg(v4) 29611 v.AddArg(v3) 29612 return true 29613 } 29614 } 
29615 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 29616 b := v.Block 29617 typ := &b.Func.Config.Types 29618 // match: (Lsh16x32 <t> x y) 29619 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 29620 for { 29621 t := v.Type 29622 y := v.Args[1] 29623 x := v.Args[0] 29624 v.reset(OpARM64CSEL) 29625 v.Aux = OpARM64LessThanU 29626 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29627 v0.AddArg(x) 29628 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29629 v1.AddArg(y) 29630 v0.AddArg(v1) 29631 v.AddArg(v0) 29632 v2 := b.NewValue0(v.Pos, OpConst64, t) 29633 v2.AuxInt = 0 29634 v.AddArg(v2) 29635 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29636 v3.AuxInt = 64 29637 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29638 v4.AddArg(y) 29639 v3.AddArg(v4) 29640 v.AddArg(v3) 29641 return true 29642 } 29643 } 29644 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 29645 b := v.Block 29646 // match: (Lsh16x64 <t> x y) 29647 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 29648 for { 29649 t := v.Type 29650 y := v.Args[1] 29651 x := v.Args[0] 29652 v.reset(OpARM64CSEL) 29653 v.Aux = OpARM64LessThanU 29654 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29655 v0.AddArg(x) 29656 v0.AddArg(y) 29657 v.AddArg(v0) 29658 v1 := b.NewValue0(v.Pos, OpConst64, t) 29659 v1.AuxInt = 0 29660 v.AddArg(v1) 29661 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29662 v2.AuxInt = 64 29663 v2.AddArg(y) 29664 v.AddArg(v2) 29665 return true 29666 } 29667 } 29668 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 29669 b := v.Block 29670 typ := &b.Func.Config.Types 29671 // match: (Lsh16x8 <t> x y) 29672 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 29673 for { 29674 t := v.Type 29675 y := v.Args[1] 29676 x := v.Args[0] 29677 v.reset(OpARM64CSEL) 29678 v.Aux = OpARM64LessThanU 29679 v0 := 
b.NewValue0(v.Pos, OpARM64SLL, t) 29680 v0.AddArg(x) 29681 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29682 v1.AddArg(y) 29683 v0.AddArg(v1) 29684 v.AddArg(v0) 29685 v2 := b.NewValue0(v.Pos, OpConst64, t) 29686 v2.AuxInt = 0 29687 v.AddArg(v2) 29688 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29689 v3.AuxInt = 64 29690 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29691 v4.AddArg(y) 29692 v3.AddArg(v4) 29693 v.AddArg(v3) 29694 return true 29695 } 29696 } 29697 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 29698 b := v.Block 29699 typ := &b.Func.Config.Types 29700 // match: (Lsh32x16 <t> x y) 29701 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 29702 for { 29703 t := v.Type 29704 y := v.Args[1] 29705 x := v.Args[0] 29706 v.reset(OpARM64CSEL) 29707 v.Aux = OpARM64LessThanU 29708 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29709 v0.AddArg(x) 29710 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29711 v1.AddArg(y) 29712 v0.AddArg(v1) 29713 v.AddArg(v0) 29714 v2 := b.NewValue0(v.Pos, OpConst64, t) 29715 v2.AuxInt = 0 29716 v.AddArg(v2) 29717 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29718 v3.AuxInt = 64 29719 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29720 v4.AddArg(y) 29721 v3.AddArg(v4) 29722 v.AddArg(v3) 29723 return true 29724 } 29725 } 29726 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 29727 b := v.Block 29728 typ := &b.Func.Config.Types 29729 // match: (Lsh32x32 <t> x y) 29730 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 29731 for { 29732 t := v.Type 29733 y := v.Args[1] 29734 x := v.Args[0] 29735 v.reset(OpARM64CSEL) 29736 v.Aux = OpARM64LessThanU 29737 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29738 v0.AddArg(x) 29739 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29740 v1.AddArg(y) 29741 v0.AddArg(v1) 29742 v.AddArg(v0) 29743 v2 := 
b.NewValue0(v.Pos, OpConst64, t) 29744 v2.AuxInt = 0 29745 v.AddArg(v2) 29746 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29747 v3.AuxInt = 64 29748 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29749 v4.AddArg(y) 29750 v3.AddArg(v4) 29751 v.AddArg(v3) 29752 return true 29753 } 29754 } 29755 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 29756 b := v.Block 29757 // match: (Lsh32x64 <t> x y) 29758 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 29759 for { 29760 t := v.Type 29761 y := v.Args[1] 29762 x := v.Args[0] 29763 v.reset(OpARM64CSEL) 29764 v.Aux = OpARM64LessThanU 29765 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29766 v0.AddArg(x) 29767 v0.AddArg(y) 29768 v.AddArg(v0) 29769 v1 := b.NewValue0(v.Pos, OpConst64, t) 29770 v1.AuxInt = 0 29771 v.AddArg(v1) 29772 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29773 v2.AuxInt = 64 29774 v2.AddArg(y) 29775 v.AddArg(v2) 29776 return true 29777 } 29778 } 29779 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 29780 b := v.Block 29781 typ := &b.Func.Config.Types 29782 // match: (Lsh32x8 <t> x y) 29783 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 29784 for { 29785 t := v.Type 29786 y := v.Args[1] 29787 x := v.Args[0] 29788 v.reset(OpARM64CSEL) 29789 v.Aux = OpARM64LessThanU 29790 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29791 v0.AddArg(x) 29792 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29793 v1.AddArg(y) 29794 v0.AddArg(v1) 29795 v.AddArg(v0) 29796 v2 := b.NewValue0(v.Pos, OpConst64, t) 29797 v2.AuxInt = 0 29798 v.AddArg(v2) 29799 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29800 v3.AuxInt = 64 29801 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29802 v4.AddArg(y) 29803 v3.AddArg(v4) 29804 v.AddArg(v3) 29805 return true 29806 } 29807 } 29808 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 29809 b := v.Block 29810 typ := 
&b.Func.Config.Types 29811 // match: (Lsh64x16 <t> x y) 29812 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 29813 for { 29814 t := v.Type 29815 y := v.Args[1] 29816 x := v.Args[0] 29817 v.reset(OpARM64CSEL) 29818 v.Aux = OpARM64LessThanU 29819 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29820 v0.AddArg(x) 29821 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29822 v1.AddArg(y) 29823 v0.AddArg(v1) 29824 v.AddArg(v0) 29825 v2 := b.NewValue0(v.Pos, OpConst64, t) 29826 v2.AuxInt = 0 29827 v.AddArg(v2) 29828 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29829 v3.AuxInt = 64 29830 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29831 v4.AddArg(y) 29832 v3.AddArg(v4) 29833 v.AddArg(v3) 29834 return true 29835 } 29836 } 29837 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 29838 b := v.Block 29839 typ := &b.Func.Config.Types 29840 // match: (Lsh64x32 <t> x y) 29841 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 29842 for { 29843 t := v.Type 29844 y := v.Args[1] 29845 x := v.Args[0] 29846 v.reset(OpARM64CSEL) 29847 v.Aux = OpARM64LessThanU 29848 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29849 v0.AddArg(x) 29850 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29851 v1.AddArg(y) 29852 v0.AddArg(v1) 29853 v.AddArg(v0) 29854 v2 := b.NewValue0(v.Pos, OpConst64, t) 29855 v2.AuxInt = 0 29856 v.AddArg(v2) 29857 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29858 v3.AuxInt = 64 29859 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29860 v4.AddArg(y) 29861 v3.AddArg(v4) 29862 v.AddArg(v3) 29863 return true 29864 } 29865 } 29866 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 29867 b := v.Block 29868 // match: (Lsh64x64 <t> x y) 29869 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 29870 for { 29871 t := v.Type 29872 y := v.Args[1] 29873 x := 
v.Args[0] 29874 v.reset(OpARM64CSEL) 29875 v.Aux = OpARM64LessThanU 29876 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29877 v0.AddArg(x) 29878 v0.AddArg(y) 29879 v.AddArg(v0) 29880 v1 := b.NewValue0(v.Pos, OpConst64, t) 29881 v1.AuxInt = 0 29882 v.AddArg(v1) 29883 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29884 v2.AuxInt = 64 29885 v2.AddArg(y) 29886 v.AddArg(v2) 29887 return true 29888 } 29889 } 29890 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 29891 b := v.Block 29892 typ := &b.Func.Config.Types 29893 // match: (Lsh64x8 <t> x y) 29894 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 29895 for { 29896 t := v.Type 29897 y := v.Args[1] 29898 x := v.Args[0] 29899 v.reset(OpARM64CSEL) 29900 v.Aux = OpARM64LessThanU 29901 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29902 v0.AddArg(x) 29903 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29904 v1.AddArg(y) 29905 v0.AddArg(v1) 29906 v.AddArg(v0) 29907 v2 := b.NewValue0(v.Pos, OpConst64, t) 29908 v2.AuxInt = 0 29909 v.AddArg(v2) 29910 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29911 v3.AuxInt = 64 29912 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 29913 v4.AddArg(y) 29914 v3.AddArg(v4) 29915 v.AddArg(v3) 29916 return true 29917 } 29918 } 29919 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 29920 b := v.Block 29921 typ := &b.Func.Config.Types 29922 // match: (Lsh8x16 <t> x y) 29923 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 29924 for { 29925 t := v.Type 29926 y := v.Args[1] 29927 x := v.Args[0] 29928 v.reset(OpARM64CSEL) 29929 v.Aux = OpARM64LessThanU 29930 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29931 v0.AddArg(x) 29932 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29933 v1.AddArg(y) 29934 v0.AddArg(v1) 29935 v.AddArg(v0) 29936 v2 := b.NewValue0(v.Pos, OpConst64, t) 29937 v2.AuxInt = 0 29938 v.AddArg(v2) 29939 v3 := 
b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29940 v3.AuxInt = 64 29941 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 29942 v4.AddArg(y) 29943 v3.AddArg(v4) 29944 v.AddArg(v3) 29945 return true 29946 } 29947 } 29948 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 29949 b := v.Block 29950 typ := &b.Func.Config.Types 29951 // match: (Lsh8x32 <t> x y) 29952 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 29953 for { 29954 t := v.Type 29955 y := v.Args[1] 29956 x := v.Args[0] 29957 v.reset(OpARM64CSEL) 29958 v.Aux = OpARM64LessThanU 29959 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29960 v0.AddArg(x) 29961 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29962 v1.AddArg(y) 29963 v0.AddArg(v1) 29964 v.AddArg(v0) 29965 v2 := b.NewValue0(v.Pos, OpConst64, t) 29966 v2.AuxInt = 0 29967 v.AddArg(v2) 29968 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29969 v3.AuxInt = 64 29970 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 29971 v4.AddArg(y) 29972 v3.AddArg(v4) 29973 v.AddArg(v3) 29974 return true 29975 } 29976 } 29977 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool { 29978 b := v.Block 29979 // match: (Lsh8x64 <t> x y) 29980 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 29981 for { 29982 t := v.Type 29983 y := v.Args[1] 29984 x := v.Args[0] 29985 v.reset(OpARM64CSEL) 29986 v.Aux = OpARM64LessThanU 29987 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 29988 v0.AddArg(x) 29989 v0.AddArg(y) 29990 v.AddArg(v0) 29991 v1 := b.NewValue0(v.Pos, OpConst64, t) 29992 v1.AuxInt = 0 29993 v.AddArg(v1) 29994 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 29995 v2.AuxInt = 64 29996 v2.AddArg(y) 29997 v.AddArg(v2) 29998 return true 29999 } 30000 } 30001 func rewriteValueARM64_OpLsh8x8_0(v *Value) bool { 30002 b := v.Block 30003 typ := &b.Func.Config.Types 30004 // match: (Lsh8x8 <t> x y) 30005 // result: (CSEL {OpARM64LessThanU} (SLL 
<t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 30006 for { 30007 t := v.Type 30008 y := v.Args[1] 30009 x := v.Args[0] 30010 v.reset(OpARM64CSEL) 30011 v.Aux = OpARM64LessThanU 30012 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 30013 v0.AddArg(x) 30014 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 30015 v1.AddArg(y) 30016 v0.AddArg(v1) 30017 v.AddArg(v0) 30018 v2 := b.NewValue0(v.Pos, OpConst64, t) 30019 v2.AuxInt = 0 30020 v.AddArg(v2) 30021 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 30022 v3.AuxInt = 64 30023 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 30024 v4.AddArg(y) 30025 v3.AddArg(v4) 30026 v.AddArg(v3) 30027 return true 30028 } 30029 } 30030 func rewriteValueARM64_OpMod16_0(v *Value) bool { 30031 b := v.Block 30032 typ := &b.Func.Config.Types 30033 // match: (Mod16 x y) 30034 // result: (MODW (SignExt16to32 x) (SignExt16to32 y)) 30035 for { 30036 y := v.Args[1] 30037 x := v.Args[0] 30038 v.reset(OpARM64MODW) 30039 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 30040 v0.AddArg(x) 30041 v.AddArg(v0) 30042 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 30043 v1.AddArg(y) 30044 v.AddArg(v1) 30045 return true 30046 } 30047 } 30048 func rewriteValueARM64_OpMod16u_0(v *Value) bool { 30049 b := v.Block 30050 typ := &b.Func.Config.Types 30051 // match: (Mod16u x y) 30052 // result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y)) 30053 for { 30054 y := v.Args[1] 30055 x := v.Args[0] 30056 v.reset(OpARM64UMODW) 30057 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 30058 v0.AddArg(x) 30059 v.AddArg(v0) 30060 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 30061 v1.AddArg(y) 30062 v.AddArg(v1) 30063 return true 30064 } 30065 } 30066 func rewriteValueARM64_OpMod32_0(v *Value) bool { 30067 // match: (Mod32 x y) 30068 // result: (MODW x y) 30069 for { 30070 y := v.Args[1] 30071 x := v.Args[0] 30072 v.reset(OpARM64MODW) 30073 v.AddArg(x) 30074 v.AddArg(y) 30075 return true 30076 } 30077 } 30078 
func rewriteValueARM64_OpMod32u_0(v *Value) bool { 30079 // match: (Mod32u x y) 30080 // result: (UMODW x y) 30081 for { 30082 y := v.Args[1] 30083 x := v.Args[0] 30084 v.reset(OpARM64UMODW) 30085 v.AddArg(x) 30086 v.AddArg(y) 30087 return true 30088 } 30089 } 30090 func rewriteValueARM64_OpMod64_0(v *Value) bool { 30091 // match: (Mod64 x y) 30092 // result: (MOD x y) 30093 for { 30094 y := v.Args[1] 30095 x := v.Args[0] 30096 v.reset(OpARM64MOD) 30097 v.AddArg(x) 30098 v.AddArg(y) 30099 return true 30100 } 30101 } 30102 func rewriteValueARM64_OpMod64u_0(v *Value) bool { 30103 // match: (Mod64u x y) 30104 // result: (UMOD x y) 30105 for { 30106 y := v.Args[1] 30107 x := v.Args[0] 30108 v.reset(OpARM64UMOD) 30109 v.AddArg(x) 30110 v.AddArg(y) 30111 return true 30112 } 30113 } 30114 func rewriteValueARM64_OpMod8_0(v *Value) bool { 30115 b := v.Block 30116 typ := &b.Func.Config.Types 30117 // match: (Mod8 x y) 30118 // result: (MODW (SignExt8to32 x) (SignExt8to32 y)) 30119 for { 30120 y := v.Args[1] 30121 x := v.Args[0] 30122 v.reset(OpARM64MODW) 30123 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 30124 v0.AddArg(x) 30125 v.AddArg(v0) 30126 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 30127 v1.AddArg(y) 30128 v.AddArg(v1) 30129 return true 30130 } 30131 } 30132 func rewriteValueARM64_OpMod8u_0(v *Value) bool { 30133 b := v.Block 30134 typ := &b.Func.Config.Types 30135 // match: (Mod8u x y) 30136 // result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y)) 30137 for { 30138 y := v.Args[1] 30139 x := v.Args[0] 30140 v.reset(OpARM64UMODW) 30141 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 30142 v0.AddArg(x) 30143 v.AddArg(v0) 30144 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 30145 v1.AddArg(y) 30146 v.AddArg(v1) 30147 return true 30148 } 30149 } 30150 func rewriteValueARM64_OpMove_0(v *Value) bool { 30151 b := v.Block 30152 typ := &b.Func.Config.Types 30153 // match: (Move [0] _ _ mem) 30154 // result: mem 30155 for { 30156 if v.AuxInt != 0 { 30157 
break 30158 } 30159 mem := v.Args[2] 30160 v.reset(OpCopy) 30161 v.Type = mem.Type 30162 v.AddArg(mem) 30163 return true 30164 } 30165 // match: (Move [1] dst src mem) 30166 // result: (MOVBstore dst (MOVBUload src mem) mem) 30167 for { 30168 if v.AuxInt != 1 { 30169 break 30170 } 30171 mem := v.Args[2] 30172 dst := v.Args[0] 30173 src := v.Args[1] 30174 v.reset(OpARM64MOVBstore) 30175 v.AddArg(dst) 30176 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 30177 v0.AddArg(src) 30178 v0.AddArg(mem) 30179 v.AddArg(v0) 30180 v.AddArg(mem) 30181 return true 30182 } 30183 // match: (Move [2] dst src mem) 30184 // result: (MOVHstore dst (MOVHUload src mem) mem) 30185 for { 30186 if v.AuxInt != 2 { 30187 break 30188 } 30189 mem := v.Args[2] 30190 dst := v.Args[0] 30191 src := v.Args[1] 30192 v.reset(OpARM64MOVHstore) 30193 v.AddArg(dst) 30194 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 30195 v0.AddArg(src) 30196 v0.AddArg(mem) 30197 v.AddArg(v0) 30198 v.AddArg(mem) 30199 return true 30200 } 30201 // match: (Move [4] dst src mem) 30202 // result: (MOVWstore dst (MOVWUload src mem) mem) 30203 for { 30204 if v.AuxInt != 4 { 30205 break 30206 } 30207 mem := v.Args[2] 30208 dst := v.Args[0] 30209 src := v.Args[1] 30210 v.reset(OpARM64MOVWstore) 30211 v.AddArg(dst) 30212 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 30213 v0.AddArg(src) 30214 v0.AddArg(mem) 30215 v.AddArg(v0) 30216 v.AddArg(mem) 30217 return true 30218 } 30219 // match: (Move [8] dst src mem) 30220 // result: (MOVDstore dst (MOVDload src mem) mem) 30221 for { 30222 if v.AuxInt != 8 { 30223 break 30224 } 30225 mem := v.Args[2] 30226 dst := v.Args[0] 30227 src := v.Args[1] 30228 v.reset(OpARM64MOVDstore) 30229 v.AddArg(dst) 30230 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30231 v0.AddArg(src) 30232 v0.AddArg(mem) 30233 v.AddArg(v0) 30234 v.AddArg(mem) 30235 return true 30236 } 30237 // match: (Move [3] dst src mem) 30238 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) 
(MOVHstore dst (MOVHUload src mem) mem)) 30239 for { 30240 if v.AuxInt != 3 { 30241 break 30242 } 30243 mem := v.Args[2] 30244 dst := v.Args[0] 30245 src := v.Args[1] 30246 v.reset(OpARM64MOVBstore) 30247 v.AuxInt = 2 30248 v.AddArg(dst) 30249 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 30250 v0.AuxInt = 2 30251 v0.AddArg(src) 30252 v0.AddArg(mem) 30253 v.AddArg(v0) 30254 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 30255 v1.AddArg(dst) 30256 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 30257 v2.AddArg(src) 30258 v2.AddArg(mem) 30259 v1.AddArg(v2) 30260 v1.AddArg(mem) 30261 v.AddArg(v1) 30262 return true 30263 } 30264 // match: (Move [5] dst src mem) 30265 // result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 30266 for { 30267 if v.AuxInt != 5 { 30268 break 30269 } 30270 mem := v.Args[2] 30271 dst := v.Args[0] 30272 src := v.Args[1] 30273 v.reset(OpARM64MOVBstore) 30274 v.AuxInt = 4 30275 v.AddArg(dst) 30276 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 30277 v0.AuxInt = 4 30278 v0.AddArg(src) 30279 v0.AddArg(mem) 30280 v.AddArg(v0) 30281 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 30282 v1.AddArg(dst) 30283 v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 30284 v2.AddArg(src) 30285 v2.AddArg(mem) 30286 v1.AddArg(v2) 30287 v1.AddArg(mem) 30288 v.AddArg(v1) 30289 return true 30290 } 30291 // match: (Move [6] dst src mem) 30292 // result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)) 30293 for { 30294 if v.AuxInt != 6 { 30295 break 30296 } 30297 mem := v.Args[2] 30298 dst := v.Args[0] 30299 src := v.Args[1] 30300 v.reset(OpARM64MOVHstore) 30301 v.AuxInt = 4 30302 v.AddArg(dst) 30303 v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 30304 v0.AuxInt = 4 30305 v0.AddArg(src) 30306 v0.AddArg(mem) 30307 v.AddArg(v0) 30308 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 30309 v1.AddArg(dst) 30310 v2 := b.NewValue0(v.Pos, 
OpARM64MOVWUload, typ.UInt32) 30311 v2.AddArg(src) 30312 v2.AddArg(mem) 30313 v1.AddArg(v2) 30314 v1.AddArg(mem) 30315 v.AddArg(v1) 30316 return true 30317 } 30318 // match: (Move [7] dst src mem) 30319 // result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))) 30320 for { 30321 if v.AuxInt != 7 { 30322 break 30323 } 30324 mem := v.Args[2] 30325 dst := v.Args[0] 30326 src := v.Args[1] 30327 v.reset(OpARM64MOVBstore) 30328 v.AuxInt = 6 30329 v.AddArg(dst) 30330 v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8) 30331 v0.AuxInt = 6 30332 v0.AddArg(src) 30333 v0.AddArg(mem) 30334 v.AddArg(v0) 30335 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 30336 v1.AuxInt = 4 30337 v1.AddArg(dst) 30338 v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16) 30339 v2.AuxInt = 4 30340 v2.AddArg(src) 30341 v2.AddArg(mem) 30342 v1.AddArg(v2) 30343 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 30344 v3.AddArg(dst) 30345 v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 30346 v4.AddArg(src) 30347 v4.AddArg(mem) 30348 v3.AddArg(v4) 30349 v3.AddArg(mem) 30350 v1.AddArg(v3) 30351 v.AddArg(v1) 30352 return true 30353 } 30354 // match: (Move [12] dst src mem) 30355 // result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 30356 for { 30357 if v.AuxInt != 12 { 30358 break 30359 } 30360 mem := v.Args[2] 30361 dst := v.Args[0] 30362 src := v.Args[1] 30363 v.reset(OpARM64MOVWstore) 30364 v.AuxInt = 8 30365 v.AddArg(dst) 30366 v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32) 30367 v0.AuxInt = 8 30368 v0.AddArg(src) 30369 v0.AddArg(mem) 30370 v.AddArg(v0) 30371 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 30372 v1.AddArg(dst) 30373 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30374 v2.AddArg(src) 30375 v2.AddArg(mem) 30376 v1.AddArg(v2) 30377 v1.AddArg(mem) 30378 v.AddArg(v1) 30379 return true 30380 } 30381 return false 30382 
} 30383 func rewriteValueARM64_OpMove_10(v *Value) bool { 30384 b := v.Block 30385 config := b.Func.Config 30386 typ := &b.Func.Config.Types 30387 // match: (Move [16] dst src mem) 30388 // result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)) 30389 for { 30390 if v.AuxInt != 16 { 30391 break 30392 } 30393 mem := v.Args[2] 30394 dst := v.Args[0] 30395 src := v.Args[1] 30396 v.reset(OpARM64MOVDstore) 30397 v.AuxInt = 8 30398 v.AddArg(dst) 30399 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30400 v0.AuxInt = 8 30401 v0.AddArg(src) 30402 v0.AddArg(mem) 30403 v.AddArg(v0) 30404 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 30405 v1.AddArg(dst) 30406 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30407 v2.AddArg(src) 30408 v2.AddArg(mem) 30409 v1.AddArg(v2) 30410 v1.AddArg(mem) 30411 v.AddArg(v1) 30412 return true 30413 } 30414 // match: (Move [24] dst src mem) 30415 // result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))) 30416 for { 30417 if v.AuxInt != 24 { 30418 break 30419 } 30420 mem := v.Args[2] 30421 dst := v.Args[0] 30422 src := v.Args[1] 30423 v.reset(OpARM64MOVDstore) 30424 v.AuxInt = 16 30425 v.AddArg(dst) 30426 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30427 v0.AuxInt = 16 30428 v0.AddArg(src) 30429 v0.AddArg(mem) 30430 v.AddArg(v0) 30431 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 30432 v1.AuxInt = 8 30433 v1.AddArg(dst) 30434 v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30435 v2.AuxInt = 8 30436 v2.AddArg(src) 30437 v2.AddArg(mem) 30438 v1.AddArg(v2) 30439 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 30440 v3.AddArg(dst) 30441 v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30442 v4.AddArg(src) 30443 v4.AddArg(mem) 30444 v3.AddArg(v4) 30445 v3.AddArg(mem) 30446 v1.AddArg(v3) 30447 v.AddArg(v1) 30448 return true 30449 } 30450 // match: (Move [s] dst src mem) 
30451 // cond: s%8 != 0 && s > 8 30452 // result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem)) 30453 for { 30454 s := v.AuxInt 30455 mem := v.Args[2] 30456 dst := v.Args[0] 30457 src := v.Args[1] 30458 if !(s%8 != 0 && s > 8) { 30459 break 30460 } 30461 v.reset(OpMove) 30462 v.AuxInt = s % 8 30463 v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type) 30464 v0.AuxInt = s - s%8 30465 v0.AddArg(dst) 30466 v.AddArg(v0) 30467 v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type) 30468 v1.AuxInt = s - s%8 30469 v1.AddArg(src) 30470 v.AddArg(v1) 30471 v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem) 30472 v2.AuxInt = s - s%8 30473 v2.AddArg(dst) 30474 v2.AddArg(src) 30475 v2.AddArg(mem) 30476 v.AddArg(v2) 30477 return true 30478 } 30479 // match: (Move [s] dst src mem) 30480 // cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice 30481 // result: (MOVDstore [s-8] dst (MOVDload [s-8] src mem) (DUFFCOPY <types.TypeMem> [8*(64-(s-8)/16)] dst src mem)) 30482 for { 30483 s := v.AuxInt 30484 mem := v.Args[2] 30485 dst := v.Args[0] 30486 src := v.Args[1] 30487 if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice) { 30488 break 30489 } 30490 v.reset(OpARM64MOVDstore) 30491 v.AuxInt = s - 8 30492 v.AddArg(dst) 30493 v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64) 30494 v0.AuxInt = s - 8 30495 v0.AddArg(src) 30496 v0.AddArg(mem) 30497 v.AddArg(v0) 30498 v1 := b.NewValue0(v.Pos, OpARM64DUFFCOPY, types.TypeMem) 30499 v1.AuxInt = 8 * (64 - (s-8)/16) 30500 v1.AddArg(dst) 30501 v1.AddArg(src) 30502 v1.AddArg(mem) 30503 v.AddArg(v1) 30504 return true 30505 } 30506 // match: (Move [s] dst src mem) 30507 // cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice 30508 // result: (DUFFCOPY [8 * (64 - s/16)] dst src mem) 30509 for { 30510 s := v.AuxInt 30511 mem := v.Args[2] 30512 dst := v.Args[0] 30513 src := v.Args[1] 30514 if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) { 30515 break 30516 } 30517 
v.reset(OpARM64DUFFCOPY) 30518 v.AuxInt = 8 * (64 - s/16) 30519 v.AddArg(dst) 30520 v.AddArg(src) 30521 v.AddArg(mem) 30522 return true 30523 } 30524 // match: (Move [s] dst src mem) 30525 // cond: s > 24 && s%8 == 0 30526 // result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem) 30527 for { 30528 s := v.AuxInt 30529 mem := v.Args[2] 30530 dst := v.Args[0] 30531 src := v.Args[1] 30532 if !(s > 24 && s%8 == 0) { 30533 break 30534 } 30535 v.reset(OpARM64LoweredMove) 30536 v.AddArg(dst) 30537 v.AddArg(src) 30538 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type) 30539 v0.AuxInt = s - 8 30540 v0.AddArg(src) 30541 v.AddArg(v0) 30542 v.AddArg(mem) 30543 return true 30544 } 30545 return false 30546 } 30547 func rewriteValueARM64_OpMul16_0(v *Value) bool { 30548 // match: (Mul16 x y) 30549 // result: (MULW x y) 30550 for { 30551 y := v.Args[1] 30552 x := v.Args[0] 30553 v.reset(OpARM64MULW) 30554 v.AddArg(x) 30555 v.AddArg(y) 30556 return true 30557 } 30558 } 30559 func rewriteValueARM64_OpMul32_0(v *Value) bool { 30560 // match: (Mul32 x y) 30561 // result: (MULW x y) 30562 for { 30563 y := v.Args[1] 30564 x := v.Args[0] 30565 v.reset(OpARM64MULW) 30566 v.AddArg(x) 30567 v.AddArg(y) 30568 return true 30569 } 30570 } 30571 func rewriteValueARM64_OpMul32F_0(v *Value) bool { 30572 // match: (Mul32F x y) 30573 // result: (FMULS x y) 30574 for { 30575 y := v.Args[1] 30576 x := v.Args[0] 30577 v.reset(OpARM64FMULS) 30578 v.AddArg(x) 30579 v.AddArg(y) 30580 return true 30581 } 30582 } 30583 func rewriteValueARM64_OpMul64_0(v *Value) bool { 30584 // match: (Mul64 x y) 30585 // result: (MUL x y) 30586 for { 30587 y := v.Args[1] 30588 x := v.Args[0] 30589 v.reset(OpARM64MUL) 30590 v.AddArg(x) 30591 v.AddArg(y) 30592 return true 30593 } 30594 } 30595 func rewriteValueARM64_OpMul64F_0(v *Value) bool { 30596 // match: (Mul64F x y) 30597 // result: (FMULD x y) 30598 for { 30599 y := v.Args[1] 30600 x := v.Args[0] 30601 v.reset(OpARM64FMULD) 30602 v.AddArg(x) 30603 
v.AddArg(y) 30604 return true 30605 } 30606 } 30607 func rewriteValueARM64_OpMul64uhilo_0(v *Value) bool { 30608 // match: (Mul64uhilo x y) 30609 // result: (LoweredMuluhilo x y) 30610 for { 30611 y := v.Args[1] 30612 x := v.Args[0] 30613 v.reset(OpARM64LoweredMuluhilo) 30614 v.AddArg(x) 30615 v.AddArg(y) 30616 return true 30617 } 30618 } 30619 func rewriteValueARM64_OpMul8_0(v *Value) bool { 30620 // match: (Mul8 x y) 30621 // result: (MULW x y) 30622 for { 30623 y := v.Args[1] 30624 x := v.Args[0] 30625 v.reset(OpARM64MULW) 30626 v.AddArg(x) 30627 v.AddArg(y) 30628 return true 30629 } 30630 } 30631 func rewriteValueARM64_OpNeg16_0(v *Value) bool { 30632 // match: (Neg16 x) 30633 // result: (NEG x) 30634 for { 30635 x := v.Args[0] 30636 v.reset(OpARM64NEG) 30637 v.AddArg(x) 30638 return true 30639 } 30640 } 30641 func rewriteValueARM64_OpNeg32_0(v *Value) bool { 30642 // match: (Neg32 x) 30643 // result: (NEG x) 30644 for { 30645 x := v.Args[0] 30646 v.reset(OpARM64NEG) 30647 v.AddArg(x) 30648 return true 30649 } 30650 } 30651 func rewriteValueARM64_OpNeg32F_0(v *Value) bool { 30652 // match: (Neg32F x) 30653 // result: (FNEGS x) 30654 for { 30655 x := v.Args[0] 30656 v.reset(OpARM64FNEGS) 30657 v.AddArg(x) 30658 return true 30659 } 30660 } 30661 func rewriteValueARM64_OpNeg64_0(v *Value) bool { 30662 // match: (Neg64 x) 30663 // result: (NEG x) 30664 for { 30665 x := v.Args[0] 30666 v.reset(OpARM64NEG) 30667 v.AddArg(x) 30668 return true 30669 } 30670 } 30671 func rewriteValueARM64_OpNeg64F_0(v *Value) bool { 30672 // match: (Neg64F x) 30673 // result: (FNEGD x) 30674 for { 30675 x := v.Args[0] 30676 v.reset(OpARM64FNEGD) 30677 v.AddArg(x) 30678 return true 30679 } 30680 } 30681 func rewriteValueARM64_OpNeg8_0(v *Value) bool { 30682 // match: (Neg8 x) 30683 // result: (NEG x) 30684 for { 30685 x := v.Args[0] 30686 v.reset(OpARM64NEG) 30687 v.AddArg(x) 30688 return true 30689 } 30690 } 30691 func rewriteValueARM64_OpNeq16_0(v *Value) bool { 30692 b := v.Block 
30693 typ := &b.Func.Config.Types 30694 // match: (Neq16 x y) 30695 // result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 30696 for { 30697 y := v.Args[1] 30698 x := v.Args[0] 30699 v.reset(OpARM64NotEqual) 30700 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 30701 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 30702 v1.AddArg(x) 30703 v0.AddArg(v1) 30704 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 30705 v2.AddArg(y) 30706 v0.AddArg(v2) 30707 v.AddArg(v0) 30708 return true 30709 } 30710 } 30711 func rewriteValueARM64_OpNeq32_0(v *Value) bool { 30712 b := v.Block 30713 // match: (Neq32 x y) 30714 // result: (NotEqual (CMPW x y)) 30715 for { 30716 y := v.Args[1] 30717 x := v.Args[0] 30718 v.reset(OpARM64NotEqual) 30719 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 30720 v0.AddArg(x) 30721 v0.AddArg(y) 30722 v.AddArg(v0) 30723 return true 30724 } 30725 } 30726 func rewriteValueARM64_OpNeq32F_0(v *Value) bool { 30727 b := v.Block 30728 // match: (Neq32F x y) 30729 // result: (NotEqual (FCMPS x y)) 30730 for { 30731 y := v.Args[1] 30732 x := v.Args[0] 30733 v.reset(OpARM64NotEqual) 30734 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 30735 v0.AddArg(x) 30736 v0.AddArg(y) 30737 v.AddArg(v0) 30738 return true 30739 } 30740 } 30741 func rewriteValueARM64_OpNeq64_0(v *Value) bool { 30742 b := v.Block 30743 // match: (Neq64 x y) 30744 // result: (NotEqual (CMP x y)) 30745 for { 30746 y := v.Args[1] 30747 x := v.Args[0] 30748 v.reset(OpARM64NotEqual) 30749 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 30750 v0.AddArg(x) 30751 v0.AddArg(y) 30752 v.AddArg(v0) 30753 return true 30754 } 30755 } 30756 func rewriteValueARM64_OpNeq64F_0(v *Value) bool { 30757 b := v.Block 30758 // match: (Neq64F x y) 30759 // result: (NotEqual (FCMPD x y)) 30760 for { 30761 y := v.Args[1] 30762 x := v.Args[0] 30763 v.reset(OpARM64NotEqual) 30764 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 30765 v0.AddArg(x) 30766 v0.AddArg(y) 
30767 v.AddArg(v0) 30768 return true 30769 } 30770 } 30771 func rewriteValueARM64_OpNeq8_0(v *Value) bool { 30772 b := v.Block 30773 typ := &b.Func.Config.Types 30774 // match: (Neq8 x y) 30775 // result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 30776 for { 30777 y := v.Args[1] 30778 x := v.Args[0] 30779 v.reset(OpARM64NotEqual) 30780 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 30781 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 30782 v1.AddArg(x) 30783 v0.AddArg(v1) 30784 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 30785 v2.AddArg(y) 30786 v0.AddArg(v2) 30787 v.AddArg(v0) 30788 return true 30789 } 30790 } 30791 func rewriteValueARM64_OpNeqB_0(v *Value) bool { 30792 // match: (NeqB x y) 30793 // result: (XOR x y) 30794 for { 30795 y := v.Args[1] 30796 x := v.Args[0] 30797 v.reset(OpARM64XOR) 30798 v.AddArg(x) 30799 v.AddArg(y) 30800 return true 30801 } 30802 } 30803 func rewriteValueARM64_OpNeqPtr_0(v *Value) bool { 30804 b := v.Block 30805 // match: (NeqPtr x y) 30806 // result: (NotEqual (CMP x y)) 30807 for { 30808 y := v.Args[1] 30809 x := v.Args[0] 30810 v.reset(OpARM64NotEqual) 30811 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 30812 v0.AddArg(x) 30813 v0.AddArg(y) 30814 v.AddArg(v0) 30815 return true 30816 } 30817 } 30818 func rewriteValueARM64_OpNilCheck_0(v *Value) bool { 30819 // match: (NilCheck ptr mem) 30820 // result: (LoweredNilCheck ptr mem) 30821 for { 30822 mem := v.Args[1] 30823 ptr := v.Args[0] 30824 v.reset(OpARM64LoweredNilCheck) 30825 v.AddArg(ptr) 30826 v.AddArg(mem) 30827 return true 30828 } 30829 } 30830 func rewriteValueARM64_OpNot_0(v *Value) bool { 30831 b := v.Block 30832 typ := &b.Func.Config.Types 30833 // match: (Not x) 30834 // result: (XOR (MOVDconst [1]) x) 30835 for { 30836 x := v.Args[0] 30837 v.reset(OpARM64XOR) 30838 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 30839 v0.AuxInt = 1 30840 v.AddArg(v0) 30841 v.AddArg(x) 30842 return true 30843 } 30844 } 30845 func 
rewriteValueARM64_OpOffPtr_0(v *Value) bool { 30846 // match: (OffPtr [off] ptr:(SP)) 30847 // result: (MOVDaddr [off] ptr) 30848 for { 30849 off := v.AuxInt 30850 ptr := v.Args[0] 30851 if ptr.Op != OpSP { 30852 break 30853 } 30854 v.reset(OpARM64MOVDaddr) 30855 v.AuxInt = off 30856 v.AddArg(ptr) 30857 return true 30858 } 30859 // match: (OffPtr [off] ptr) 30860 // result: (ADDconst [off] ptr) 30861 for { 30862 off := v.AuxInt 30863 ptr := v.Args[0] 30864 v.reset(OpARM64ADDconst) 30865 v.AuxInt = off 30866 v.AddArg(ptr) 30867 return true 30868 } 30869 } 30870 func rewriteValueARM64_OpOr16_0(v *Value) bool { 30871 // match: (Or16 x y) 30872 // result: (OR x y) 30873 for { 30874 y := v.Args[1] 30875 x := v.Args[0] 30876 v.reset(OpARM64OR) 30877 v.AddArg(x) 30878 v.AddArg(y) 30879 return true 30880 } 30881 } 30882 func rewriteValueARM64_OpOr32_0(v *Value) bool { 30883 // match: (Or32 x y) 30884 // result: (OR x y) 30885 for { 30886 y := v.Args[1] 30887 x := v.Args[0] 30888 v.reset(OpARM64OR) 30889 v.AddArg(x) 30890 v.AddArg(y) 30891 return true 30892 } 30893 } 30894 func rewriteValueARM64_OpOr64_0(v *Value) bool { 30895 // match: (Or64 x y) 30896 // result: (OR x y) 30897 for { 30898 y := v.Args[1] 30899 x := v.Args[0] 30900 v.reset(OpARM64OR) 30901 v.AddArg(x) 30902 v.AddArg(y) 30903 return true 30904 } 30905 } 30906 func rewriteValueARM64_OpOr8_0(v *Value) bool { 30907 // match: (Or8 x y) 30908 // result: (OR x y) 30909 for { 30910 y := v.Args[1] 30911 x := v.Args[0] 30912 v.reset(OpARM64OR) 30913 v.AddArg(x) 30914 v.AddArg(y) 30915 return true 30916 } 30917 } 30918 func rewriteValueARM64_OpOrB_0(v *Value) bool { 30919 // match: (OrB x y) 30920 // result: (OR x y) 30921 for { 30922 y := v.Args[1] 30923 x := v.Args[0] 30924 v.reset(OpARM64OR) 30925 v.AddArg(x) 30926 v.AddArg(y) 30927 return true 30928 } 30929 } 30930 func rewriteValueARM64_OpPanicBounds_0(v *Value) bool { 30931 // match: (PanicBounds [kind] x y mem) 30932 // cond: boundsABI(kind) == 0 30933 // 
result: (LoweredPanicBoundsA [kind] x y mem) 30934 for { 30935 kind := v.AuxInt 30936 mem := v.Args[2] 30937 x := v.Args[0] 30938 y := v.Args[1] 30939 if !(boundsABI(kind) == 0) { 30940 break 30941 } 30942 v.reset(OpARM64LoweredPanicBoundsA) 30943 v.AuxInt = kind 30944 v.AddArg(x) 30945 v.AddArg(y) 30946 v.AddArg(mem) 30947 return true 30948 } 30949 // match: (PanicBounds [kind] x y mem) 30950 // cond: boundsABI(kind) == 1 30951 // result: (LoweredPanicBoundsB [kind] x y mem) 30952 for { 30953 kind := v.AuxInt 30954 mem := v.Args[2] 30955 x := v.Args[0] 30956 y := v.Args[1] 30957 if !(boundsABI(kind) == 1) { 30958 break 30959 } 30960 v.reset(OpARM64LoweredPanicBoundsB) 30961 v.AuxInt = kind 30962 v.AddArg(x) 30963 v.AddArg(y) 30964 v.AddArg(mem) 30965 return true 30966 } 30967 // match: (PanicBounds [kind] x y mem) 30968 // cond: boundsABI(kind) == 2 30969 // result: (LoweredPanicBoundsC [kind] x y mem) 30970 for { 30971 kind := v.AuxInt 30972 mem := v.Args[2] 30973 x := v.Args[0] 30974 y := v.Args[1] 30975 if !(boundsABI(kind) == 2) { 30976 break 30977 } 30978 v.reset(OpARM64LoweredPanicBoundsC) 30979 v.AuxInt = kind 30980 v.AddArg(x) 30981 v.AddArg(y) 30982 v.AddArg(mem) 30983 return true 30984 } 30985 return false 30986 } 30987 func rewriteValueARM64_OpPopCount16_0(v *Value) bool { 30988 b := v.Block 30989 typ := &b.Func.Config.Types 30990 // match: (PopCount16 <t> x) 30991 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x))))) 30992 for { 30993 t := v.Type 30994 x := v.Args[0] 30995 v.reset(OpARM64FMOVDfpgp) 30996 v.Type = t 30997 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 30998 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 30999 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 31000 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31001 v3.AddArg(x) 31002 v2.AddArg(v3) 31003 v1.AddArg(v2) 31004 v0.AddArg(v1) 31005 v.AddArg(v0) 31006 return true 31007 } 31008 } 31009 func 
rewriteValueARM64_OpPopCount32_0(v *Value) bool { 31010 b := v.Block 31011 typ := &b.Func.Config.Types 31012 // match: (PopCount32 <t> x) 31013 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x))))) 31014 for { 31015 t := v.Type 31016 x := v.Args[0] 31017 v.reset(OpARM64FMOVDfpgp) 31018 v.Type = t 31019 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 31020 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 31021 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 31022 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31023 v3.AddArg(x) 31024 v2.AddArg(v3) 31025 v1.AddArg(v2) 31026 v0.AddArg(v1) 31027 v.AddArg(v0) 31028 return true 31029 } 31030 } 31031 func rewriteValueARM64_OpPopCount64_0(v *Value) bool { 31032 b := v.Block 31033 typ := &b.Func.Config.Types 31034 // match: (PopCount64 <t> x) 31035 // result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x)))) 31036 for { 31037 t := v.Type 31038 x := v.Args[0] 31039 v.reset(OpARM64FMOVDfpgp) 31040 v.Type = t 31041 v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64) 31042 v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64) 31043 v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64) 31044 v2.AddArg(x) 31045 v1.AddArg(v2) 31046 v0.AddArg(v1) 31047 v.AddArg(v0) 31048 return true 31049 } 31050 } 31051 func rewriteValueARM64_OpRotateLeft16_0(v *Value) bool { 31052 b := v.Block 31053 typ := &b.Func.Config.Types 31054 // match: (RotateLeft16 <t> x (MOVDconst [c])) 31055 // result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15]))) 31056 for { 31057 t := v.Type 31058 _ = v.Args[1] 31059 x := v.Args[0] 31060 v_1 := v.Args[1] 31061 if v_1.Op != OpARM64MOVDconst { 31062 break 31063 } 31064 c := v_1.AuxInt 31065 v.reset(OpOr16) 31066 v0 := b.NewValue0(v.Pos, OpLsh16x64, t) 31067 v0.AddArg(x) 31068 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 31069 v1.AuxInt = c & 15 31070 
v0.AddArg(v1) 31071 v.AddArg(v0) 31072 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t) 31073 v2.AddArg(x) 31074 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 31075 v3.AuxInt = -c & 15 31076 v2.AddArg(v3) 31077 v.AddArg(v2) 31078 return true 31079 } 31080 return false 31081 } 31082 func rewriteValueARM64_OpRotateLeft32_0(v *Value) bool { 31083 b := v.Block 31084 // match: (RotateLeft32 x y) 31085 // result: (RORW x (NEG <y.Type> y)) 31086 for { 31087 y := v.Args[1] 31088 x := v.Args[0] 31089 v.reset(OpARM64RORW) 31090 v.AddArg(x) 31091 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 31092 v0.AddArg(y) 31093 v.AddArg(v0) 31094 return true 31095 } 31096 } 31097 func rewriteValueARM64_OpRotateLeft64_0(v *Value) bool { 31098 b := v.Block 31099 // match: (RotateLeft64 x y) 31100 // result: (ROR x (NEG <y.Type> y)) 31101 for { 31102 y := v.Args[1] 31103 x := v.Args[0] 31104 v.reset(OpARM64ROR) 31105 v.AddArg(x) 31106 v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type) 31107 v0.AddArg(y) 31108 v.AddArg(v0) 31109 return true 31110 } 31111 } 31112 func rewriteValueARM64_OpRotateLeft8_0(v *Value) bool { 31113 b := v.Block 31114 typ := &b.Func.Config.Types 31115 // match: (RotateLeft8 <t> x (MOVDconst [c])) 31116 // result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7]))) 31117 for { 31118 t := v.Type 31119 _ = v.Args[1] 31120 x := v.Args[0] 31121 v_1 := v.Args[1] 31122 if v_1.Op != OpARM64MOVDconst { 31123 break 31124 } 31125 c := v_1.AuxInt 31126 v.reset(OpOr8) 31127 v0 := b.NewValue0(v.Pos, OpLsh8x64, t) 31128 v0.AddArg(x) 31129 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 31130 v1.AuxInt = c & 7 31131 v0.AddArg(v1) 31132 v.AddArg(v0) 31133 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t) 31134 v2.AddArg(x) 31135 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 31136 v3.AuxInt = -c & 7 31137 v2.AddArg(v3) 31138 v.AddArg(v2) 31139 return true 31140 } 31141 return false 31142 } 31143 func rewriteValueARM64_OpRound_0(v *Value) bool { 31144 // match: 
(Round x) 31145 // result: (FRINTAD x) 31146 for { 31147 x := v.Args[0] 31148 v.reset(OpARM64FRINTAD) 31149 v.AddArg(x) 31150 return true 31151 } 31152 } 31153 func rewriteValueARM64_OpRound32F_0(v *Value) bool { 31154 // match: (Round32F x) 31155 // result: (LoweredRound32F x) 31156 for { 31157 x := v.Args[0] 31158 v.reset(OpARM64LoweredRound32F) 31159 v.AddArg(x) 31160 return true 31161 } 31162 } 31163 func rewriteValueARM64_OpRound64F_0(v *Value) bool { 31164 // match: (Round64F x) 31165 // result: (LoweredRound64F x) 31166 for { 31167 x := v.Args[0] 31168 v.reset(OpARM64LoweredRound64F) 31169 v.AddArg(x) 31170 return true 31171 } 31172 } 31173 func rewriteValueARM64_OpRoundToEven_0(v *Value) bool { 31174 // match: (RoundToEven x) 31175 // result: (FRINTND x) 31176 for { 31177 x := v.Args[0] 31178 v.reset(OpARM64FRINTND) 31179 v.AddArg(x) 31180 return true 31181 } 31182 } 31183 func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool { 31184 b := v.Block 31185 typ := &b.Func.Config.Types 31186 // match: (Rsh16Ux16 <t> x y) 31187 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 31188 for { 31189 t := v.Type 31190 y := v.Args[1] 31191 x := v.Args[0] 31192 v.reset(OpARM64CSEL) 31193 v.Aux = OpARM64LessThanU 31194 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31195 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31196 v1.AddArg(x) 31197 v0.AddArg(v1) 31198 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31199 v2.AddArg(y) 31200 v0.AddArg(v2) 31201 v.AddArg(v0) 31202 v3 := b.NewValue0(v.Pos, OpConst64, t) 31203 v3.AuxInt = 0 31204 v.AddArg(v3) 31205 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31206 v4.AuxInt = 64 31207 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31208 v5.AddArg(y) 31209 v4.AddArg(v5) 31210 v.AddArg(v4) 31211 return true 31212 } 31213 } 31214 func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool { 31215 b := v.Block 31216 typ := 
&b.Func.Config.Types 31217 // match: (Rsh16Ux32 <t> x y) 31218 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 31219 for { 31220 t := v.Type 31221 y := v.Args[1] 31222 x := v.Args[0] 31223 v.reset(OpARM64CSEL) 31224 v.Aux = OpARM64LessThanU 31225 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31226 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31227 v1.AddArg(x) 31228 v0.AddArg(v1) 31229 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31230 v2.AddArg(y) 31231 v0.AddArg(v2) 31232 v.AddArg(v0) 31233 v3 := b.NewValue0(v.Pos, OpConst64, t) 31234 v3.AuxInt = 0 31235 v.AddArg(v3) 31236 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31237 v4.AuxInt = 64 31238 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31239 v5.AddArg(y) 31240 v4.AddArg(v5) 31241 v.AddArg(v4) 31242 return true 31243 } 31244 } 31245 func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool { 31246 b := v.Block 31247 typ := &b.Func.Config.Types 31248 // match: (Rsh16Ux64 <t> x y) 31249 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 31250 for { 31251 t := v.Type 31252 y := v.Args[1] 31253 x := v.Args[0] 31254 v.reset(OpARM64CSEL) 31255 v.Aux = OpARM64LessThanU 31256 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31257 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31258 v1.AddArg(x) 31259 v0.AddArg(v1) 31260 v0.AddArg(y) 31261 v.AddArg(v0) 31262 v2 := b.NewValue0(v.Pos, OpConst64, t) 31263 v2.AuxInt = 0 31264 v.AddArg(v2) 31265 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31266 v3.AuxInt = 64 31267 v3.AddArg(y) 31268 v.AddArg(v3) 31269 return true 31270 } 31271 } 31272 func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool { 31273 b := v.Block 31274 typ := &b.Func.Config.Types 31275 // match: (Rsh16Ux8 <t> x y) 31276 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] 
(ZeroExt8to64 y))) 31277 for { 31278 t := v.Type 31279 y := v.Args[1] 31280 x := v.Args[0] 31281 v.reset(OpARM64CSEL) 31282 v.Aux = OpARM64LessThanU 31283 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31284 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31285 v1.AddArg(x) 31286 v0.AddArg(v1) 31287 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31288 v2.AddArg(y) 31289 v0.AddArg(v2) 31290 v.AddArg(v0) 31291 v3 := b.NewValue0(v.Pos, OpConst64, t) 31292 v3.AuxInt = 0 31293 v.AddArg(v3) 31294 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31295 v4.AuxInt = 64 31296 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31297 v5.AddArg(y) 31298 v4.AddArg(v5) 31299 v.AddArg(v4) 31300 return true 31301 } 31302 } 31303 func rewriteValueARM64_OpRsh16x16_0(v *Value) bool { 31304 b := v.Block 31305 typ := &b.Func.Config.Types 31306 // match: (Rsh16x16 x y) 31307 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 31308 for { 31309 y := v.Args[1] 31310 x := v.Args[0] 31311 v.reset(OpARM64SRA) 31312 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 31313 v0.AddArg(x) 31314 v.AddArg(v0) 31315 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31316 v1.Aux = OpARM64LessThanU 31317 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31318 v2.AddArg(y) 31319 v1.AddArg(v2) 31320 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31321 v3.AuxInt = 63 31322 v1.AddArg(v3) 31323 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31324 v4.AuxInt = 64 31325 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31326 v5.AddArg(y) 31327 v4.AddArg(v5) 31328 v1.AddArg(v4) 31329 v.AddArg(v1) 31330 return true 31331 } 31332 } 31333 func rewriteValueARM64_OpRsh16x32_0(v *Value) bool { 31334 b := v.Block 31335 typ := &b.Func.Config.Types 31336 // match: (Rsh16x32 x y) 31337 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) 
(CMPconst [64] (ZeroExt32to64 y)))) 31338 for { 31339 y := v.Args[1] 31340 x := v.Args[0] 31341 v.reset(OpARM64SRA) 31342 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 31343 v0.AddArg(x) 31344 v.AddArg(v0) 31345 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31346 v1.Aux = OpARM64LessThanU 31347 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31348 v2.AddArg(y) 31349 v1.AddArg(v2) 31350 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31351 v3.AuxInt = 63 31352 v1.AddArg(v3) 31353 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31354 v4.AuxInt = 64 31355 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31356 v5.AddArg(y) 31357 v4.AddArg(v5) 31358 v1.AddArg(v4) 31359 v.AddArg(v1) 31360 return true 31361 } 31362 } 31363 func rewriteValueARM64_OpRsh16x64_0(v *Value) bool { 31364 b := v.Block 31365 typ := &b.Func.Config.Types 31366 // match: (Rsh16x64 x y) 31367 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 31368 for { 31369 y := v.Args[1] 31370 x := v.Args[0] 31371 v.reset(OpARM64SRA) 31372 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 31373 v0.AddArg(x) 31374 v.AddArg(v0) 31375 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31376 v1.Aux = OpARM64LessThanU 31377 v1.AddArg(y) 31378 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 31379 v2.AuxInt = 63 31380 v1.AddArg(v2) 31381 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31382 v3.AuxInt = 64 31383 v3.AddArg(y) 31384 v1.AddArg(v3) 31385 v.AddArg(v1) 31386 return true 31387 } 31388 } 31389 func rewriteValueARM64_OpRsh16x8_0(v *Value) bool { 31390 b := v.Block 31391 typ := &b.Func.Config.Types 31392 // match: (Rsh16x8 x y) 31393 // result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 31394 for { 31395 y := v.Args[1] 31396 x := v.Args[0] 31397 v.reset(OpARM64SRA) 31398 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 31399 
v0.AddArg(x) 31400 v.AddArg(v0) 31401 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31402 v1.Aux = OpARM64LessThanU 31403 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31404 v2.AddArg(y) 31405 v1.AddArg(v2) 31406 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31407 v3.AuxInt = 63 31408 v1.AddArg(v3) 31409 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31410 v4.AuxInt = 64 31411 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31412 v5.AddArg(y) 31413 v4.AddArg(v5) 31414 v1.AddArg(v4) 31415 v.AddArg(v1) 31416 return true 31417 } 31418 } 31419 func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool { 31420 b := v.Block 31421 typ := &b.Func.Config.Types 31422 // match: (Rsh32Ux16 <t> x y) 31423 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 31424 for { 31425 t := v.Type 31426 y := v.Args[1] 31427 x := v.Args[0] 31428 v.reset(OpARM64CSEL) 31429 v.Aux = OpARM64LessThanU 31430 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31431 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31432 v1.AddArg(x) 31433 v0.AddArg(v1) 31434 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31435 v2.AddArg(y) 31436 v0.AddArg(v2) 31437 v.AddArg(v0) 31438 v3 := b.NewValue0(v.Pos, OpConst64, t) 31439 v3.AuxInt = 0 31440 v.AddArg(v3) 31441 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31442 v4.AuxInt = 64 31443 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31444 v5.AddArg(y) 31445 v4.AddArg(v5) 31446 v.AddArg(v4) 31447 return true 31448 } 31449 } 31450 func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool { 31451 b := v.Block 31452 typ := &b.Func.Config.Types 31453 // match: (Rsh32Ux32 <t> x y) 31454 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 31455 for { 31456 t := v.Type 31457 y := v.Args[1] 31458 x := v.Args[0] 31459 v.reset(OpARM64CSEL) 31460 v.Aux = OpARM64LessThanU 31461 v0 := 
b.NewValue0(v.Pos, OpARM64SRL, t) 31462 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31463 v1.AddArg(x) 31464 v0.AddArg(v1) 31465 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31466 v2.AddArg(y) 31467 v0.AddArg(v2) 31468 v.AddArg(v0) 31469 v3 := b.NewValue0(v.Pos, OpConst64, t) 31470 v3.AuxInt = 0 31471 v.AddArg(v3) 31472 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31473 v4.AuxInt = 64 31474 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31475 v5.AddArg(y) 31476 v4.AddArg(v5) 31477 v.AddArg(v4) 31478 return true 31479 } 31480 } 31481 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 31482 b := v.Block 31483 typ := &b.Func.Config.Types 31484 // match: (Rsh32Ux64 <t> x y) 31485 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 31486 for { 31487 t := v.Type 31488 y := v.Args[1] 31489 x := v.Args[0] 31490 v.reset(OpARM64CSEL) 31491 v.Aux = OpARM64LessThanU 31492 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31493 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31494 v1.AddArg(x) 31495 v0.AddArg(v1) 31496 v0.AddArg(y) 31497 v.AddArg(v0) 31498 v2 := b.NewValue0(v.Pos, OpConst64, t) 31499 v2.AuxInt = 0 31500 v.AddArg(v2) 31501 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31502 v3.AuxInt = 64 31503 v3.AddArg(y) 31504 v.AddArg(v3) 31505 return true 31506 } 31507 } 31508 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 31509 b := v.Block 31510 typ := &b.Func.Config.Types 31511 // match: (Rsh32Ux8 <t> x y) 31512 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 31513 for { 31514 t := v.Type 31515 y := v.Args[1] 31516 x := v.Args[0] 31517 v.reset(OpARM64CSEL) 31518 v.Aux = OpARM64LessThanU 31519 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31520 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31521 v1.AddArg(x) 31522 v0.AddArg(v1) 31523 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, 
typ.UInt64) 31524 v2.AddArg(y) 31525 v0.AddArg(v2) 31526 v.AddArg(v0) 31527 v3 := b.NewValue0(v.Pos, OpConst64, t) 31528 v3.AuxInt = 0 31529 v.AddArg(v3) 31530 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31531 v4.AuxInt = 64 31532 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31533 v5.AddArg(y) 31534 v4.AddArg(v5) 31535 v.AddArg(v4) 31536 return true 31537 } 31538 } 31539 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 31540 b := v.Block 31541 typ := &b.Func.Config.Types 31542 // match: (Rsh32x16 x y) 31543 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 31544 for { 31545 y := v.Args[1] 31546 x := v.Args[0] 31547 v.reset(OpARM64SRA) 31548 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 31549 v0.AddArg(x) 31550 v.AddArg(v0) 31551 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31552 v1.Aux = OpARM64LessThanU 31553 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31554 v2.AddArg(y) 31555 v1.AddArg(v2) 31556 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31557 v3.AuxInt = 63 31558 v1.AddArg(v3) 31559 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31560 v4.AuxInt = 64 31561 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31562 v5.AddArg(y) 31563 v4.AddArg(v5) 31564 v1.AddArg(v4) 31565 v.AddArg(v1) 31566 return true 31567 } 31568 } 31569 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 31570 b := v.Block 31571 typ := &b.Func.Config.Types 31572 // match: (Rsh32x32 x y) 31573 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 31574 for { 31575 y := v.Args[1] 31576 x := v.Args[0] 31577 v.reset(OpARM64SRA) 31578 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 31579 v0.AddArg(x) 31580 v.AddArg(v0) 31581 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31582 v1.Aux = OpARM64LessThanU 31583 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, 
typ.UInt64) 31584 v2.AddArg(y) 31585 v1.AddArg(v2) 31586 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31587 v3.AuxInt = 63 31588 v1.AddArg(v3) 31589 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31590 v4.AuxInt = 64 31591 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31592 v5.AddArg(y) 31593 v4.AddArg(v5) 31594 v1.AddArg(v4) 31595 v.AddArg(v1) 31596 return true 31597 } 31598 } 31599 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 31600 b := v.Block 31601 typ := &b.Func.Config.Types 31602 // match: (Rsh32x64 x y) 31603 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 31604 for { 31605 y := v.Args[1] 31606 x := v.Args[0] 31607 v.reset(OpARM64SRA) 31608 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 31609 v0.AddArg(x) 31610 v.AddArg(v0) 31611 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31612 v1.Aux = OpARM64LessThanU 31613 v1.AddArg(y) 31614 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 31615 v2.AuxInt = 63 31616 v1.AddArg(v2) 31617 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31618 v3.AuxInt = 64 31619 v3.AddArg(y) 31620 v1.AddArg(v3) 31621 v.AddArg(v1) 31622 return true 31623 } 31624 } 31625 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 31626 b := v.Block 31627 typ := &b.Func.Config.Types 31628 // match: (Rsh32x8 x y) 31629 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 31630 for { 31631 y := v.Args[1] 31632 x := v.Args[0] 31633 v.reset(OpARM64SRA) 31634 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 31635 v0.AddArg(x) 31636 v.AddArg(v0) 31637 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31638 v1.Aux = OpARM64LessThanU 31639 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31640 v2.AddArg(y) 31641 v1.AddArg(v2) 31642 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 31643 v3.AuxInt = 63 31644 v1.AddArg(v3) 31645 v4 := b.NewValue0(v.Pos, 
OpARM64CMPconst, types.TypeFlags) 31646 v4.AuxInt = 64 31647 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31648 v5.AddArg(y) 31649 v4.AddArg(v5) 31650 v1.AddArg(v4) 31651 v.AddArg(v1) 31652 return true 31653 } 31654 } 31655 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 31656 b := v.Block 31657 typ := &b.Func.Config.Types 31658 // match: (Rsh64Ux16 <t> x y) 31659 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 31660 for { 31661 t := v.Type 31662 y := v.Args[1] 31663 x := v.Args[0] 31664 v.reset(OpARM64CSEL) 31665 v.Aux = OpARM64LessThanU 31666 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31667 v0.AddArg(x) 31668 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31669 v1.AddArg(y) 31670 v0.AddArg(v1) 31671 v.AddArg(v0) 31672 v2 := b.NewValue0(v.Pos, OpConst64, t) 31673 v2.AuxInt = 0 31674 v.AddArg(v2) 31675 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31676 v3.AuxInt = 64 31677 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31678 v4.AddArg(y) 31679 v3.AddArg(v4) 31680 v.AddArg(v3) 31681 return true 31682 } 31683 } 31684 func rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 31685 b := v.Block 31686 typ := &b.Func.Config.Types 31687 // match: (Rsh64Ux32 <t> x y) 31688 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 31689 for { 31690 t := v.Type 31691 y := v.Args[1] 31692 x := v.Args[0] 31693 v.reset(OpARM64CSEL) 31694 v.Aux = OpARM64LessThanU 31695 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31696 v0.AddArg(x) 31697 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31698 v1.AddArg(y) 31699 v0.AddArg(v1) 31700 v.AddArg(v0) 31701 v2 := b.NewValue0(v.Pos, OpConst64, t) 31702 v2.AuxInt = 0 31703 v.AddArg(v2) 31704 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31705 v3.AuxInt = 64 31706 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31707 v4.AddArg(y) 31708 v3.AddArg(v4) 31709 
v.AddArg(v3) 31710 return true 31711 } 31712 } 31713 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 31714 b := v.Block 31715 // match: (Rsh64Ux64 <t> x y) 31716 // result: (CSEL {OpARM64LessThanU} (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 31717 for { 31718 t := v.Type 31719 y := v.Args[1] 31720 x := v.Args[0] 31721 v.reset(OpARM64CSEL) 31722 v.Aux = OpARM64LessThanU 31723 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31724 v0.AddArg(x) 31725 v0.AddArg(y) 31726 v.AddArg(v0) 31727 v1 := b.NewValue0(v.Pos, OpConst64, t) 31728 v1.AuxInt = 0 31729 v.AddArg(v1) 31730 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31731 v2.AuxInt = 64 31732 v2.AddArg(y) 31733 v.AddArg(v2) 31734 return true 31735 } 31736 } 31737 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool { 31738 b := v.Block 31739 typ := &b.Func.Config.Types 31740 // match: (Rsh64Ux8 <t> x y) 31741 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 31742 for { 31743 t := v.Type 31744 y := v.Args[1] 31745 x := v.Args[0] 31746 v.reset(OpARM64CSEL) 31747 v.Aux = OpARM64LessThanU 31748 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31749 v0.AddArg(x) 31750 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31751 v1.AddArg(y) 31752 v0.AddArg(v1) 31753 v.AddArg(v0) 31754 v2 := b.NewValue0(v.Pos, OpConst64, t) 31755 v2.AuxInt = 0 31756 v.AddArg(v2) 31757 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31758 v3.AuxInt = 64 31759 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31760 v4.AddArg(y) 31761 v3.AddArg(v4) 31762 v.AddArg(v3) 31763 return true 31764 } 31765 } 31766 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 31767 b := v.Block 31768 typ := &b.Func.Config.Types 31769 // match: (Rsh64x16 x y) 31770 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 31771 for { 31772 y := v.Args[1] 31773 x := v.Args[0] 31774 v.reset(OpARM64SRA) 31775 
v.AddArg(x) 31776 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31777 v0.Aux = OpARM64LessThanU 31778 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31779 v1.AddArg(y) 31780 v0.AddArg(v1) 31781 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 31782 v2.AuxInt = 63 31783 v0.AddArg(v2) 31784 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31785 v3.AuxInt = 64 31786 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31787 v4.AddArg(y) 31788 v3.AddArg(v4) 31789 v0.AddArg(v3) 31790 v.AddArg(v0) 31791 return true 31792 } 31793 } 31794 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 31795 b := v.Block 31796 typ := &b.Func.Config.Types 31797 // match: (Rsh64x32 x y) 31798 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 31799 for { 31800 y := v.Args[1] 31801 x := v.Args[0] 31802 v.reset(OpARM64SRA) 31803 v.AddArg(x) 31804 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31805 v0.Aux = OpARM64LessThanU 31806 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31807 v1.AddArg(y) 31808 v0.AddArg(v1) 31809 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 31810 v2.AuxInt = 63 31811 v0.AddArg(v2) 31812 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31813 v3.AuxInt = 64 31814 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31815 v4.AddArg(y) 31816 v3.AddArg(v4) 31817 v0.AddArg(v3) 31818 v.AddArg(v0) 31819 return true 31820 } 31821 } 31822 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 31823 b := v.Block 31824 // match: (Rsh64x64 x y) 31825 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 31826 for { 31827 y := v.Args[1] 31828 x := v.Args[0] 31829 v.reset(OpARM64SRA) 31830 v.AddArg(x) 31831 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31832 v0.Aux = OpARM64LessThanU 31833 v0.AddArg(y) 31834 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 31835 v1.AuxInt = 63 31836 v0.AddArg(v1) 31837 v2 := b.NewValue0(v.Pos, 
OpARM64CMPconst, types.TypeFlags) 31838 v2.AuxInt = 64 31839 v2.AddArg(y) 31840 v0.AddArg(v2) 31841 v.AddArg(v0) 31842 return true 31843 } 31844 } 31845 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 31846 b := v.Block 31847 typ := &b.Func.Config.Types 31848 // match: (Rsh64x8 x y) 31849 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 31850 for { 31851 y := v.Args[1] 31852 x := v.Args[0] 31853 v.reset(OpARM64SRA) 31854 v.AddArg(x) 31855 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 31856 v0.Aux = OpARM64LessThanU 31857 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31858 v1.AddArg(y) 31859 v0.AddArg(v1) 31860 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 31861 v2.AuxInt = 63 31862 v0.AddArg(v2) 31863 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31864 v3.AuxInt = 64 31865 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31866 v4.AddArg(y) 31867 v3.AddArg(v4) 31868 v0.AddArg(v3) 31869 v.AddArg(v0) 31870 return true 31871 } 31872 } 31873 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 31874 b := v.Block 31875 typ := &b.Func.Config.Types 31876 // match: (Rsh8Ux16 <t> x y) 31877 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 31878 for { 31879 t := v.Type 31880 y := v.Args[1] 31881 x := v.Args[0] 31882 v.reset(OpARM64CSEL) 31883 v.Aux = OpARM64LessThanU 31884 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31885 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31886 v1.AddArg(x) 31887 v0.AddArg(v1) 31888 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31889 v2.AddArg(y) 31890 v0.AddArg(v2) 31891 v.AddArg(v0) 31892 v3 := b.NewValue0(v.Pos, OpConst64, t) 31893 v3.AuxInt = 0 31894 v.AddArg(v3) 31895 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31896 v4.AuxInt = 64 31897 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 31898 v5.AddArg(y) 31899 v4.AddArg(v5) 
31900 v.AddArg(v4) 31901 return true 31902 } 31903 } 31904 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 31905 b := v.Block 31906 typ := &b.Func.Config.Types 31907 // match: (Rsh8Ux32 <t> x y) 31908 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 31909 for { 31910 t := v.Type 31911 y := v.Args[1] 31912 x := v.Args[0] 31913 v.reset(OpARM64CSEL) 31914 v.Aux = OpARM64LessThanU 31915 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31916 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31917 v1.AddArg(x) 31918 v0.AddArg(v1) 31919 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31920 v2.AddArg(y) 31921 v0.AddArg(v2) 31922 v.AddArg(v0) 31923 v3 := b.NewValue0(v.Pos, OpConst64, t) 31924 v3.AuxInt = 0 31925 v.AddArg(v3) 31926 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31927 v4.AuxInt = 64 31928 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 31929 v5.AddArg(y) 31930 v4.AddArg(v5) 31931 v.AddArg(v4) 31932 return true 31933 } 31934 } 31935 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 31936 b := v.Block 31937 typ := &b.Func.Config.Types 31938 // match: (Rsh8Ux64 <t> x y) 31939 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 31940 for { 31941 t := v.Type 31942 y := v.Args[1] 31943 x := v.Args[0] 31944 v.reset(OpARM64CSEL) 31945 v.Aux = OpARM64LessThanU 31946 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31947 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31948 v1.AddArg(x) 31949 v0.AddArg(v1) 31950 v0.AddArg(y) 31951 v.AddArg(v0) 31952 v2 := b.NewValue0(v.Pos, OpConst64, t) 31953 v2.AuxInt = 0 31954 v.AddArg(v2) 31955 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31956 v3.AuxInt = 64 31957 v3.AddArg(y) 31958 v.AddArg(v3) 31959 return true 31960 } 31961 } 31962 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 31963 b := v.Block 31964 typ := &b.Func.Config.Types 31965 // match: (Rsh8Ux8 
<t> x y) 31966 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 31967 for { 31968 t := v.Type 31969 y := v.Args[1] 31970 x := v.Args[0] 31971 v.reset(OpARM64CSEL) 31972 v.Aux = OpARM64LessThanU 31973 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 31974 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31975 v1.AddArg(x) 31976 v0.AddArg(v1) 31977 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31978 v2.AddArg(y) 31979 v0.AddArg(v2) 31980 v.AddArg(v0) 31981 v3 := b.NewValue0(v.Pos, OpConst64, t) 31982 v3.AuxInt = 0 31983 v.AddArg(v3) 31984 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 31985 v4.AuxInt = 64 31986 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 31987 v5.AddArg(y) 31988 v4.AddArg(v5) 31989 v.AddArg(v4) 31990 return true 31991 } 31992 } 31993 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 31994 b := v.Block 31995 typ := &b.Func.Config.Types 31996 // match: (Rsh8x16 x y) 31997 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 31998 for { 31999 y := v.Args[1] 32000 x := v.Args[0] 32001 v.reset(OpARM64SRA) 32002 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 32003 v0.AddArg(x) 32004 v.AddArg(v0) 32005 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 32006 v1.Aux = OpARM64LessThanU 32007 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 32008 v2.AddArg(y) 32009 v1.AddArg(v2) 32010 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 32011 v3.AuxInt = 63 32012 v1.AddArg(v3) 32013 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 32014 v4.AuxInt = 64 32015 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 32016 v5.AddArg(y) 32017 v4.AddArg(v5) 32018 v1.AddArg(v4) 32019 v.AddArg(v1) 32020 return true 32021 } 32022 } 32023 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 32024 b := v.Block 32025 typ := &b.Func.Config.Types 32026 // match: (Rsh8x32 x y) 
32027 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 32028 for { 32029 y := v.Args[1] 32030 x := v.Args[0] 32031 v.reset(OpARM64SRA) 32032 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 32033 v0.AddArg(x) 32034 v.AddArg(v0) 32035 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 32036 v1.Aux = OpARM64LessThanU 32037 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 32038 v2.AddArg(y) 32039 v1.AddArg(v2) 32040 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 32041 v3.AuxInt = 63 32042 v1.AddArg(v3) 32043 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 32044 v4.AuxInt = 64 32045 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 32046 v5.AddArg(y) 32047 v4.AddArg(v5) 32048 v1.AddArg(v4) 32049 v.AddArg(v1) 32050 return true 32051 } 32052 } 32053 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 32054 b := v.Block 32055 typ := &b.Func.Config.Types 32056 // match: (Rsh8x64 x y) 32057 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 32058 for { 32059 y := v.Args[1] 32060 x := v.Args[0] 32061 v.reset(OpARM64SRA) 32062 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 32063 v0.AddArg(x) 32064 v.AddArg(v0) 32065 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 32066 v1.Aux = OpARM64LessThanU 32067 v1.AddArg(y) 32068 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 32069 v2.AuxInt = 63 32070 v1.AddArg(v2) 32071 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 32072 v3.AuxInt = 64 32073 v3.AddArg(y) 32074 v1.AddArg(v3) 32075 v.AddArg(v1) 32076 return true 32077 } 32078 } 32079 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 32080 b := v.Block 32081 typ := &b.Func.Config.Types 32082 // match: (Rsh8x8 x y) 32083 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 32084 for { 32085 y := v.Args[1] 32086 x := 
v.Args[0] 32087 v.reset(OpARM64SRA) 32088 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 32089 v0.AddArg(x) 32090 v.AddArg(v0) 32091 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 32092 v1.Aux = OpARM64LessThanU 32093 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 32094 v2.AddArg(y) 32095 v1.AddArg(v2) 32096 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 32097 v3.AuxInt = 63 32098 v1.AddArg(v3) 32099 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 32100 v4.AuxInt = 64 32101 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 32102 v5.AddArg(y) 32103 v4.AddArg(v5) 32104 v1.AddArg(v4) 32105 v.AddArg(v1) 32106 return true 32107 } 32108 } 32109 func rewriteValueARM64_OpSelect0_0(v *Value) bool { 32110 b := v.Block 32111 typ := &b.Func.Config.Types 32112 // match: (Select0 (Add64carry x y c)) 32113 // result: (Select0 <typ.UInt64> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c)))) 32114 for { 32115 v_0 := v.Args[0] 32116 if v_0.Op != OpAdd64carry { 32117 break 32118 } 32119 c := v_0.Args[2] 32120 x := v_0.Args[0] 32121 y := v_0.Args[1] 32122 v.reset(OpSelect0) 32123 v.Type = typ.UInt64 32124 v0 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32125 v0.AddArg(x) 32126 v0.AddArg(y) 32127 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32128 v2 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32129 v2.AuxInt = -1 32130 v2.AddArg(c) 32131 v1.AddArg(v2) 32132 v0.AddArg(v1) 32133 v.AddArg(v0) 32134 return true 32135 } 32136 // match: (Select0 (Sub64borrow x y bo)) 32137 // result: (Select0 <typ.UInt64> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo)))) 32138 for { 32139 v_0 := v.Args[0] 32140 if v_0.Op != OpSub64borrow { 32141 break 32142 } 32143 bo := v_0.Args[2] 32144 x := v_0.Args[0] 32145 y := v_0.Args[1] 32146 v.reset(OpSelect0) 32147 v.Type = typ.UInt64 32148 v0 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, 
types.TypeFlags)) 32149 v0.AddArg(x) 32150 v0.AddArg(y) 32151 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32152 v2 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32153 v2.AddArg(bo) 32154 v1.AddArg(v2) 32155 v0.AddArg(v1) 32156 v.AddArg(v0) 32157 return true 32158 } 32159 return false 32160 } 32161 func rewriteValueARM64_OpSelect1_0(v *Value) bool { 32162 b := v.Block 32163 typ := &b.Func.Config.Types 32164 // match: (Select1 (Add64carry x y c)) 32165 // result: (ADCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (ADCSflags x y (Select1 <types.TypeFlags> (ADDSconstflags [-1] c))))) 32166 for { 32167 v_0 := v.Args[0] 32168 if v_0.Op != OpAdd64carry { 32169 break 32170 } 32171 c := v_0.Args[2] 32172 x := v_0.Args[0] 32173 y := v_0.Args[1] 32174 v.reset(OpARM64ADCzerocarry) 32175 v.Type = typ.UInt64 32176 v0 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32177 v1 := b.NewValue0(v.Pos, OpARM64ADCSflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32178 v1.AddArg(x) 32179 v1.AddArg(y) 32180 v2 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32181 v3 := b.NewValue0(v.Pos, OpARM64ADDSconstflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32182 v3.AuxInt = -1 32183 v3.AddArg(c) 32184 v2.AddArg(v3) 32185 v1.AddArg(v2) 32186 v0.AddArg(v1) 32187 v.AddArg(v0) 32188 return true 32189 } 32190 // match: (Select1 (Sub64borrow x y bo)) 32191 // result: (NEG <typ.UInt64> (NGCzerocarry <typ.UInt64> (Select1 <types.TypeFlags> (SBCSflags x y (Select1 <types.TypeFlags> (NEGSflags bo)))))) 32192 for { 32193 v_0 := v.Args[0] 32194 if v_0.Op != OpSub64borrow { 32195 break 32196 } 32197 bo := v_0.Args[2] 32198 x := v_0.Args[0] 32199 y := v_0.Args[1] 32200 v.reset(OpARM64NEG) 32201 v.Type = typ.UInt64 32202 v0 := b.NewValue0(v.Pos, OpARM64NGCzerocarry, typ.UInt64) 32203 v1 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32204 v2 := b.NewValue0(v.Pos, OpARM64SBCSflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32205 v2.AddArg(x) 
32206 v2.AddArg(y) 32207 v3 := b.NewValue0(v.Pos, OpSelect1, types.TypeFlags) 32208 v4 := b.NewValue0(v.Pos, OpARM64NEGSflags, types.NewTuple(typ.UInt64, types.TypeFlags)) 32209 v4.AddArg(bo) 32210 v3.AddArg(v4) 32211 v2.AddArg(v3) 32212 v1.AddArg(v2) 32213 v0.AddArg(v1) 32214 v.AddArg(v0) 32215 return true 32216 } 32217 return false 32218 } 32219 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool { 32220 // match: (SignExt16to32 x) 32221 // result: (MOVHreg x) 32222 for { 32223 x := v.Args[0] 32224 v.reset(OpARM64MOVHreg) 32225 v.AddArg(x) 32226 return true 32227 } 32228 } 32229 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool { 32230 // match: (SignExt16to64 x) 32231 // result: (MOVHreg x) 32232 for { 32233 x := v.Args[0] 32234 v.reset(OpARM64MOVHreg) 32235 v.AddArg(x) 32236 return true 32237 } 32238 } 32239 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool { 32240 // match: (SignExt32to64 x) 32241 // result: (MOVWreg x) 32242 for { 32243 x := v.Args[0] 32244 v.reset(OpARM64MOVWreg) 32245 v.AddArg(x) 32246 return true 32247 } 32248 } 32249 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool { 32250 // match: (SignExt8to16 x) 32251 // result: (MOVBreg x) 32252 for { 32253 x := v.Args[0] 32254 v.reset(OpARM64MOVBreg) 32255 v.AddArg(x) 32256 return true 32257 } 32258 } 32259 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool { 32260 // match: (SignExt8to32 x) 32261 // result: (MOVBreg x) 32262 for { 32263 x := v.Args[0] 32264 v.reset(OpARM64MOVBreg) 32265 v.AddArg(x) 32266 return true 32267 } 32268 } 32269 func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool { 32270 // match: (SignExt8to64 x) 32271 // result: (MOVBreg x) 32272 for { 32273 x := v.Args[0] 32274 v.reset(OpARM64MOVBreg) 32275 v.AddArg(x) 32276 return true 32277 } 32278 } 32279 func rewriteValueARM64_OpSlicemask_0(v *Value) bool { 32280 b := v.Block 32281 // match: (Slicemask <t> x) 32282 // result: (SRAconst (NEG <t> x) [63]) 32283 for { 32284 t := v.Type 32285 x := v.Args[0] 
32286 v.reset(OpARM64SRAconst) 32287 v.AuxInt = 63 32288 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 32289 v0.AddArg(x) 32290 v.AddArg(v0) 32291 return true 32292 } 32293 } 32294 func rewriteValueARM64_OpSqrt_0(v *Value) bool { 32295 // match: (Sqrt x) 32296 // result: (FSQRTD x) 32297 for { 32298 x := v.Args[0] 32299 v.reset(OpARM64FSQRTD) 32300 v.AddArg(x) 32301 return true 32302 } 32303 } 32304 func rewriteValueARM64_OpStaticCall_0(v *Value) bool { 32305 // match: (StaticCall [argwid] {target} mem) 32306 // result: (CALLstatic [argwid] {target} mem) 32307 for { 32308 argwid := v.AuxInt 32309 target := v.Aux 32310 mem := v.Args[0] 32311 v.reset(OpARM64CALLstatic) 32312 v.AuxInt = argwid 32313 v.Aux = target 32314 v.AddArg(mem) 32315 return true 32316 } 32317 } 32318 func rewriteValueARM64_OpStore_0(v *Value) bool { 32319 // match: (Store {t} ptr val mem) 32320 // cond: t.(*types.Type).Size() == 1 32321 // result: (MOVBstore ptr val mem) 32322 for { 32323 t := v.Aux 32324 mem := v.Args[2] 32325 ptr := v.Args[0] 32326 val := v.Args[1] 32327 if !(t.(*types.Type).Size() == 1) { 32328 break 32329 } 32330 v.reset(OpARM64MOVBstore) 32331 v.AddArg(ptr) 32332 v.AddArg(val) 32333 v.AddArg(mem) 32334 return true 32335 } 32336 // match: (Store {t} ptr val mem) 32337 // cond: t.(*types.Type).Size() == 2 32338 // result: (MOVHstore ptr val mem) 32339 for { 32340 t := v.Aux 32341 mem := v.Args[2] 32342 ptr := v.Args[0] 32343 val := v.Args[1] 32344 if !(t.(*types.Type).Size() == 2) { 32345 break 32346 } 32347 v.reset(OpARM64MOVHstore) 32348 v.AddArg(ptr) 32349 v.AddArg(val) 32350 v.AddArg(mem) 32351 return true 32352 } 32353 // match: (Store {t} ptr val mem) 32354 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 32355 // result: (MOVWstore ptr val mem) 32356 for { 32357 t := v.Aux 32358 mem := v.Args[2] 32359 ptr := v.Args[0] 32360 val := v.Args[1] 32361 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 32362 break 32363 } 32364 v.reset(OpARM64MOVWstore) 
32365 v.AddArg(ptr) 32366 v.AddArg(val) 32367 v.AddArg(mem) 32368 return true 32369 } 32370 // match: (Store {t} ptr val mem) 32371 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 32372 // result: (MOVDstore ptr val mem) 32373 for { 32374 t := v.Aux 32375 mem := v.Args[2] 32376 ptr := v.Args[0] 32377 val := v.Args[1] 32378 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 32379 break 32380 } 32381 v.reset(OpARM64MOVDstore) 32382 v.AddArg(ptr) 32383 v.AddArg(val) 32384 v.AddArg(mem) 32385 return true 32386 } 32387 // match: (Store {t} ptr val mem) 32388 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 32389 // result: (FMOVSstore ptr val mem) 32390 for { 32391 t := v.Aux 32392 mem := v.Args[2] 32393 ptr := v.Args[0] 32394 val := v.Args[1] 32395 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 32396 break 32397 } 32398 v.reset(OpARM64FMOVSstore) 32399 v.AddArg(ptr) 32400 v.AddArg(val) 32401 v.AddArg(mem) 32402 return true 32403 } 32404 // match: (Store {t} ptr val mem) 32405 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 32406 // result: (FMOVDstore ptr val mem) 32407 for { 32408 t := v.Aux 32409 mem := v.Args[2] 32410 ptr := v.Args[0] 32411 val := v.Args[1] 32412 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 32413 break 32414 } 32415 v.reset(OpARM64FMOVDstore) 32416 v.AddArg(ptr) 32417 v.AddArg(val) 32418 v.AddArg(mem) 32419 return true 32420 } 32421 return false 32422 } 32423 func rewriteValueARM64_OpSub16_0(v *Value) bool { 32424 // match: (Sub16 x y) 32425 // result: (SUB x y) 32426 for { 32427 y := v.Args[1] 32428 x := v.Args[0] 32429 v.reset(OpARM64SUB) 32430 v.AddArg(x) 32431 v.AddArg(y) 32432 return true 32433 } 32434 } 32435 func rewriteValueARM64_OpSub32_0(v *Value) bool { 32436 // match: (Sub32 x y) 32437 // result: (SUB x y) 32438 for { 32439 y := v.Args[1] 32440 x := v.Args[0] 32441 v.reset(OpARM64SUB) 32442 v.AddArg(x) 32443 v.AddArg(y) 32444 return true 32445 } 
32446 } 32447 func rewriteValueARM64_OpSub32F_0(v *Value) bool { 32448 // match: (Sub32F x y) 32449 // result: (FSUBS x y) 32450 for { 32451 y := v.Args[1] 32452 x := v.Args[0] 32453 v.reset(OpARM64FSUBS) 32454 v.AddArg(x) 32455 v.AddArg(y) 32456 return true 32457 } 32458 } 32459 func rewriteValueARM64_OpSub64_0(v *Value) bool { 32460 // match: (Sub64 x y) 32461 // result: (SUB x y) 32462 for { 32463 y := v.Args[1] 32464 x := v.Args[0] 32465 v.reset(OpARM64SUB) 32466 v.AddArg(x) 32467 v.AddArg(y) 32468 return true 32469 } 32470 } 32471 func rewriteValueARM64_OpSub64F_0(v *Value) bool { 32472 // match: (Sub64F x y) 32473 // result: (FSUBD x y) 32474 for { 32475 y := v.Args[1] 32476 x := v.Args[0] 32477 v.reset(OpARM64FSUBD) 32478 v.AddArg(x) 32479 v.AddArg(y) 32480 return true 32481 } 32482 } 32483 func rewriteValueARM64_OpSub8_0(v *Value) bool { 32484 // match: (Sub8 x y) 32485 // result: (SUB x y) 32486 for { 32487 y := v.Args[1] 32488 x := v.Args[0] 32489 v.reset(OpARM64SUB) 32490 v.AddArg(x) 32491 v.AddArg(y) 32492 return true 32493 } 32494 } 32495 func rewriteValueARM64_OpSubPtr_0(v *Value) bool { 32496 // match: (SubPtr x y) 32497 // result: (SUB x y) 32498 for { 32499 y := v.Args[1] 32500 x := v.Args[0] 32501 v.reset(OpARM64SUB) 32502 v.AddArg(x) 32503 v.AddArg(y) 32504 return true 32505 } 32506 } 32507 func rewriteValueARM64_OpTrunc_0(v *Value) bool { 32508 // match: (Trunc x) 32509 // result: (FRINTZD x) 32510 for { 32511 x := v.Args[0] 32512 v.reset(OpARM64FRINTZD) 32513 v.AddArg(x) 32514 return true 32515 } 32516 } 32517 func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool { 32518 // match: (Trunc16to8 x) 32519 // result: x 32520 for { 32521 x := v.Args[0] 32522 v.reset(OpCopy) 32523 v.Type = x.Type 32524 v.AddArg(x) 32525 return true 32526 } 32527 } 32528 func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool { 32529 // match: (Trunc32to16 x) 32530 // result: x 32531 for { 32532 x := v.Args[0] 32533 v.reset(OpCopy) 32534 v.Type = x.Type 32535 v.AddArg(x) 
32536 return true 32537 } 32538 } 32539 func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool { 32540 // match: (Trunc32to8 x) 32541 // result: x 32542 for { 32543 x := v.Args[0] 32544 v.reset(OpCopy) 32545 v.Type = x.Type 32546 v.AddArg(x) 32547 return true 32548 } 32549 } 32550 func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool { 32551 // match: (Trunc64to16 x) 32552 // result: x 32553 for { 32554 x := v.Args[0] 32555 v.reset(OpCopy) 32556 v.Type = x.Type 32557 v.AddArg(x) 32558 return true 32559 } 32560 } 32561 func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool { 32562 // match: (Trunc64to32 x) 32563 // result: x 32564 for { 32565 x := v.Args[0] 32566 v.reset(OpCopy) 32567 v.Type = x.Type 32568 v.AddArg(x) 32569 return true 32570 } 32571 } 32572 func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool { 32573 // match: (Trunc64to8 x) 32574 // result: x 32575 for { 32576 x := v.Args[0] 32577 v.reset(OpCopy) 32578 v.Type = x.Type 32579 v.AddArg(x) 32580 return true 32581 } 32582 } 32583 func rewriteValueARM64_OpWB_0(v *Value) bool { 32584 // match: (WB {fn} destptr srcptr mem) 32585 // result: (LoweredWB {fn} destptr srcptr mem) 32586 for { 32587 fn := v.Aux 32588 mem := v.Args[2] 32589 destptr := v.Args[0] 32590 srcptr := v.Args[1] 32591 v.reset(OpARM64LoweredWB) 32592 v.Aux = fn 32593 v.AddArg(destptr) 32594 v.AddArg(srcptr) 32595 v.AddArg(mem) 32596 return true 32597 } 32598 } 32599 func rewriteValueARM64_OpXor16_0(v *Value) bool { 32600 // match: (Xor16 x y) 32601 // result: (XOR x y) 32602 for { 32603 y := v.Args[1] 32604 x := v.Args[0] 32605 v.reset(OpARM64XOR) 32606 v.AddArg(x) 32607 v.AddArg(y) 32608 return true 32609 } 32610 } 32611 func rewriteValueARM64_OpXor32_0(v *Value) bool { 32612 // match: (Xor32 x y) 32613 // result: (XOR x y) 32614 for { 32615 y := v.Args[1] 32616 x := v.Args[0] 32617 v.reset(OpARM64XOR) 32618 v.AddArg(x) 32619 v.AddArg(y) 32620 return true 32621 } 32622 } 32623 func rewriteValueARM64_OpXor64_0(v *Value) bool { 32624 // match: 
(Xor64 x y) 32625 // result: (XOR x y) 32626 for { 32627 y := v.Args[1] 32628 x := v.Args[0] 32629 v.reset(OpARM64XOR) 32630 v.AddArg(x) 32631 v.AddArg(y) 32632 return true 32633 } 32634 } 32635 func rewriteValueARM64_OpXor8_0(v *Value) bool { 32636 // match: (Xor8 x y) 32637 // result: (XOR x y) 32638 for { 32639 y := v.Args[1] 32640 x := v.Args[0] 32641 v.reset(OpARM64XOR) 32642 v.AddArg(x) 32643 v.AddArg(y) 32644 return true 32645 } 32646 } 32647 func rewriteValueARM64_OpZero_0(v *Value) bool { 32648 b := v.Block 32649 typ := &b.Func.Config.Types 32650 // match: (Zero [0] _ mem) 32651 // result: mem 32652 for { 32653 if v.AuxInt != 0 { 32654 break 32655 } 32656 mem := v.Args[1] 32657 v.reset(OpCopy) 32658 v.Type = mem.Type 32659 v.AddArg(mem) 32660 return true 32661 } 32662 // match: (Zero [1] ptr mem) 32663 // result: (MOVBstore ptr (MOVDconst [0]) mem) 32664 for { 32665 if v.AuxInt != 1 { 32666 break 32667 } 32668 mem := v.Args[1] 32669 ptr := v.Args[0] 32670 v.reset(OpARM64MOVBstore) 32671 v.AddArg(ptr) 32672 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32673 v0.AuxInt = 0 32674 v.AddArg(v0) 32675 v.AddArg(mem) 32676 return true 32677 } 32678 // match: (Zero [2] ptr mem) 32679 // result: (MOVHstore ptr (MOVDconst [0]) mem) 32680 for { 32681 if v.AuxInt != 2 { 32682 break 32683 } 32684 mem := v.Args[1] 32685 ptr := v.Args[0] 32686 v.reset(OpARM64MOVHstore) 32687 v.AddArg(ptr) 32688 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32689 v0.AuxInt = 0 32690 v.AddArg(v0) 32691 v.AddArg(mem) 32692 return true 32693 } 32694 // match: (Zero [4] ptr mem) 32695 // result: (MOVWstore ptr (MOVDconst [0]) mem) 32696 for { 32697 if v.AuxInt != 4 { 32698 break 32699 } 32700 mem := v.Args[1] 32701 ptr := v.Args[0] 32702 v.reset(OpARM64MOVWstore) 32703 v.AddArg(ptr) 32704 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32705 v0.AuxInt = 0 32706 v.AddArg(v0) 32707 v.AddArg(mem) 32708 return true 32709 } 32710 // match: (Zero [8] ptr mem) 32711 // 
result: (MOVDstore ptr (MOVDconst [0]) mem) 32712 for { 32713 if v.AuxInt != 8 { 32714 break 32715 } 32716 mem := v.Args[1] 32717 ptr := v.Args[0] 32718 v.reset(OpARM64MOVDstore) 32719 v.AddArg(ptr) 32720 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32721 v0.AuxInt = 0 32722 v.AddArg(v0) 32723 v.AddArg(mem) 32724 return true 32725 } 32726 // match: (Zero [3] ptr mem) 32727 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 32728 for { 32729 if v.AuxInt != 3 { 32730 break 32731 } 32732 mem := v.Args[1] 32733 ptr := v.Args[0] 32734 v.reset(OpARM64MOVBstore) 32735 v.AuxInt = 2 32736 v.AddArg(ptr) 32737 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32738 v0.AuxInt = 0 32739 v.AddArg(v0) 32740 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 32741 v1.AddArg(ptr) 32742 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32743 v2.AuxInt = 0 32744 v1.AddArg(v2) 32745 v1.AddArg(mem) 32746 v.AddArg(v1) 32747 return true 32748 } 32749 // match: (Zero [5] ptr mem) 32750 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 32751 for { 32752 if v.AuxInt != 5 { 32753 break 32754 } 32755 mem := v.Args[1] 32756 ptr := v.Args[0] 32757 v.reset(OpARM64MOVBstore) 32758 v.AuxInt = 4 32759 v.AddArg(ptr) 32760 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32761 v0.AuxInt = 0 32762 v.AddArg(v0) 32763 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 32764 v1.AddArg(ptr) 32765 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32766 v2.AuxInt = 0 32767 v1.AddArg(v2) 32768 v1.AddArg(mem) 32769 v.AddArg(v1) 32770 return true 32771 } 32772 // match: (Zero [6] ptr mem) 32773 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 32774 for { 32775 if v.AuxInt != 6 { 32776 break 32777 } 32778 mem := v.Args[1] 32779 ptr := v.Args[0] 32780 v.reset(OpARM64MOVHstore) 32781 v.AuxInt = 4 32782 v.AddArg(ptr) 32783 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, 
typ.UInt64) 32784 v0.AuxInt = 0 32785 v.AddArg(v0) 32786 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 32787 v1.AddArg(ptr) 32788 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32789 v2.AuxInt = 0 32790 v1.AddArg(v2) 32791 v1.AddArg(mem) 32792 v.AddArg(v1) 32793 return true 32794 } 32795 // match: (Zero [7] ptr mem) 32796 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 32797 for { 32798 if v.AuxInt != 7 { 32799 break 32800 } 32801 mem := v.Args[1] 32802 ptr := v.Args[0] 32803 v.reset(OpARM64MOVBstore) 32804 v.AuxInt = 6 32805 v.AddArg(ptr) 32806 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32807 v0.AuxInt = 0 32808 v.AddArg(v0) 32809 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 32810 v1.AuxInt = 4 32811 v1.AddArg(ptr) 32812 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32813 v2.AuxInt = 0 32814 v1.AddArg(v2) 32815 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 32816 v3.AddArg(ptr) 32817 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32818 v4.AuxInt = 0 32819 v3.AddArg(v4) 32820 v3.AddArg(mem) 32821 v1.AddArg(v3) 32822 v.AddArg(v1) 32823 return true 32824 } 32825 // match: (Zero [9] ptr mem) 32826 // result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 32827 for { 32828 if v.AuxInt != 9 { 32829 break 32830 } 32831 mem := v.Args[1] 32832 ptr := v.Args[0] 32833 v.reset(OpARM64MOVBstore) 32834 v.AuxInt = 8 32835 v.AddArg(ptr) 32836 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32837 v0.AuxInt = 0 32838 v.AddArg(v0) 32839 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32840 v1.AddArg(ptr) 32841 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32842 v2.AuxInt = 0 32843 v1.AddArg(v2) 32844 v1.AddArg(mem) 32845 v.AddArg(v1) 32846 return true 32847 } 32848 return false 32849 } 32850 func rewriteValueARM64_OpZero_10(v *Value) bool { 32851 b := v.Block 32852 typ := 
&b.Func.Config.Types 32853 // match: (Zero [10] ptr mem) 32854 // result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 32855 for { 32856 if v.AuxInt != 10 { 32857 break 32858 } 32859 mem := v.Args[1] 32860 ptr := v.Args[0] 32861 v.reset(OpARM64MOVHstore) 32862 v.AuxInt = 8 32863 v.AddArg(ptr) 32864 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32865 v0.AuxInt = 0 32866 v.AddArg(v0) 32867 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32868 v1.AddArg(ptr) 32869 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32870 v2.AuxInt = 0 32871 v1.AddArg(v2) 32872 v1.AddArg(mem) 32873 v.AddArg(v1) 32874 return true 32875 } 32876 // match: (Zero [11] ptr mem) 32877 // result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 32878 for { 32879 if v.AuxInt != 11 { 32880 break 32881 } 32882 mem := v.Args[1] 32883 ptr := v.Args[0] 32884 v.reset(OpARM64MOVBstore) 32885 v.AuxInt = 10 32886 v.AddArg(ptr) 32887 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32888 v0.AuxInt = 0 32889 v.AddArg(v0) 32890 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 32891 v1.AuxInt = 8 32892 v1.AddArg(ptr) 32893 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32894 v2.AuxInt = 0 32895 v1.AddArg(v2) 32896 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32897 v3.AddArg(ptr) 32898 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32899 v4.AuxInt = 0 32900 v3.AddArg(v4) 32901 v3.AddArg(mem) 32902 v1.AddArg(v3) 32903 v.AddArg(v1) 32904 return true 32905 } 32906 // match: (Zero [12] ptr mem) 32907 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 32908 for { 32909 if v.AuxInt != 12 { 32910 break 32911 } 32912 mem := v.Args[1] 32913 ptr := v.Args[0] 32914 v.reset(OpARM64MOVWstore) 32915 v.AuxInt = 8 32916 v.AddArg(ptr) 32917 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32918 v0.AuxInt = 0 32919 v.AddArg(v0) 32920 v1 
:= b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32921 v1.AddArg(ptr) 32922 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32923 v2.AuxInt = 0 32924 v1.AddArg(v2) 32925 v1.AddArg(mem) 32926 v.AddArg(v1) 32927 return true 32928 } 32929 // match: (Zero [13] ptr mem) 32930 // result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 32931 for { 32932 if v.AuxInt != 13 { 32933 break 32934 } 32935 mem := v.Args[1] 32936 ptr := v.Args[0] 32937 v.reset(OpARM64MOVBstore) 32938 v.AuxInt = 12 32939 v.AddArg(ptr) 32940 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32941 v0.AuxInt = 0 32942 v.AddArg(v0) 32943 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 32944 v1.AuxInt = 8 32945 v1.AddArg(ptr) 32946 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32947 v2.AuxInt = 0 32948 v1.AddArg(v2) 32949 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32950 v3.AddArg(ptr) 32951 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32952 v4.AuxInt = 0 32953 v3.AddArg(v4) 32954 v3.AddArg(mem) 32955 v1.AddArg(v3) 32956 v.AddArg(v1) 32957 return true 32958 } 32959 // match: (Zero [14] ptr mem) 32960 // result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 32961 for { 32962 if v.AuxInt != 14 { 32963 break 32964 } 32965 mem := v.Args[1] 32966 ptr := v.Args[0] 32967 v.reset(OpARM64MOVHstore) 32968 v.AuxInt = 12 32969 v.AddArg(ptr) 32970 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32971 v0.AuxInt = 0 32972 v.AddArg(v0) 32973 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 32974 v1.AuxInt = 8 32975 v1.AddArg(ptr) 32976 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32977 v2.AuxInt = 0 32978 v1.AddArg(v2) 32979 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 32980 v3.AddArg(ptr) 32981 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 32982 v4.AuxInt = 0 32983 v3.AddArg(v4) 32984 
v3.AddArg(mem) 32985 v1.AddArg(v3) 32986 v.AddArg(v1) 32987 return true 32988 } 32989 // match: (Zero [15] ptr mem) 32990 // result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))) 32991 for { 32992 if v.AuxInt != 15 { 32993 break 32994 } 32995 mem := v.Args[1] 32996 ptr := v.Args[0] 32997 v.reset(OpARM64MOVBstore) 32998 v.AuxInt = 14 32999 v.AddArg(ptr) 33000 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33001 v0.AuxInt = 0 33002 v.AddArg(v0) 33003 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 33004 v1.AuxInt = 12 33005 v1.AddArg(ptr) 33006 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33007 v2.AuxInt = 0 33008 v1.AddArg(v2) 33009 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 33010 v3.AuxInt = 8 33011 v3.AddArg(ptr) 33012 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33013 v4.AuxInt = 0 33014 v3.AddArg(v4) 33015 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 33016 v5.AddArg(ptr) 33017 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33018 v6.AuxInt = 0 33019 v5.AddArg(v6) 33020 v5.AddArg(mem) 33021 v3.AddArg(v5) 33022 v1.AddArg(v3) 33023 v.AddArg(v1) 33024 return true 33025 } 33026 // match: (Zero [16] ptr mem) 33027 // result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem) 33028 for { 33029 if v.AuxInt != 16 { 33030 break 33031 } 33032 mem := v.Args[1] 33033 ptr := v.Args[0] 33034 v.reset(OpARM64STP) 33035 v.AuxInt = 0 33036 v.AddArg(ptr) 33037 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33038 v0.AuxInt = 0 33039 v.AddArg(v0) 33040 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33041 v1.AuxInt = 0 33042 v.AddArg(v1) 33043 v.AddArg(mem) 33044 return true 33045 } 33046 // match: (Zero [32] ptr mem) 33047 // result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)) 33048 for { 33049 if v.AuxInt != 32 { 33050 break 33051 } 33052 mem := 
v.Args[1] 33053 ptr := v.Args[0] 33054 v.reset(OpARM64STP) 33055 v.AuxInt = 16 33056 v.AddArg(ptr) 33057 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33058 v0.AuxInt = 0 33059 v.AddArg(v0) 33060 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33061 v1.AuxInt = 0 33062 v.AddArg(v1) 33063 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 33064 v2.AuxInt = 0 33065 v2.AddArg(ptr) 33066 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33067 v3.AuxInt = 0 33068 v2.AddArg(v3) 33069 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33070 v4.AuxInt = 0 33071 v2.AddArg(v4) 33072 v2.AddArg(mem) 33073 v.AddArg(v2) 33074 return true 33075 } 33076 // match: (Zero [48] ptr mem) 33077 // result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))) 33078 for { 33079 if v.AuxInt != 48 { 33080 break 33081 } 33082 mem := v.Args[1] 33083 ptr := v.Args[0] 33084 v.reset(OpARM64STP) 33085 v.AuxInt = 32 33086 v.AddArg(ptr) 33087 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33088 v0.AuxInt = 0 33089 v.AddArg(v0) 33090 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33091 v1.AuxInt = 0 33092 v.AddArg(v1) 33093 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 33094 v2.AuxInt = 16 33095 v2.AddArg(ptr) 33096 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33097 v3.AuxInt = 0 33098 v2.AddArg(v3) 33099 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33100 v4.AuxInt = 0 33101 v2.AddArg(v4) 33102 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 33103 v5.AuxInt = 0 33104 v5.AddArg(ptr) 33105 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33106 v6.AuxInt = 0 33107 v5.AddArg(v6) 33108 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 33109 v7.AuxInt = 0 33110 v5.AddArg(v7) 33111 v5.AddArg(mem) 33112 v2.AddArg(v5) 33113 v.AddArg(v2) 33114 return true 33115 } 33116 // match: (Zero [64] ptr mem) 33117 // result: (STP [48] ptr (MOVDconst 
// [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))))
// (Continuation of the preceding rule comment: Zero [64] is lowered into a
// chain of four STP stores of a zero register pair, 16 bytes each.)
for {
	if v.AuxInt != 64 {
		break
	}
	mem := v.Args[1]
	ptr := v.Args[0]
	// Outermost STP writes the highest 16-byte chunk (offset 48); each nested
	// STP handles the next-lower chunk, threading the memory value through
	// the innermost store at offset 0.
	v.reset(OpARM64STP)
	v.AuxInt = 48
	v.AddArg(ptr)
	v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v0.AuxInt = 0
	v.AddArg(v0)
	v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v1.AuxInt = 0
	v.AddArg(v1)
	v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
	v2.AuxInt = 32
	v2.AddArg(ptr)
	v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v3.AuxInt = 0
	v2.AddArg(v3)
	v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v4.AuxInt = 0
	v2.AddArg(v4)
	v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
	v5.AuxInt = 16
	v5.AddArg(ptr)
	v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v6.AuxInt = 0
	v5.AddArg(v6)
	v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v7.AuxInt = 0
	v5.AddArg(v7)
	v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem)
	v8.AuxInt = 0
	v8.AddArg(ptr)
	v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v9.AuxInt = 0
	v8.AddArg(v9)
	v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
	v10.AuxInt = 0
	v8.AddArg(v10)
	v8.AddArg(mem)
	v5.AddArg(v8)
	v2.AddArg(v5)
	v.AddArg(v2)
	return true
}
return false
}

// rewriteValueARM64_OpZero_20 holds the remaining generic Zero lowering rules:
// sizes not handled by a fixed-size pattern are split into an aligned body
// plus an overlapping tail, then lowered to DUFFZERO or a LoweredZero loop.
// Machine generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpZero_20(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 <= 8 && s > 16
	// result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem))
	// Tail of at most 8 bytes: zero the 16-byte-aligned body, then an
	// (overlapping) 8-byte zero ending exactly at s.
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 != 0 && s%16 <= 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - 8
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s - s%16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 != 0 && s%16 > 8 && s > 16
	// result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem))
	// Tail of 9..15 bytes: same scheme with an overlapping 16-byte tail zero.
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 != 0 && s%16 > 8 && s > 16) {
			break
		}
		v.reset(OpZero)
		v.AuxInt = 16
		v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem)
		v1.AuxInt = s - s%16
		v1.AddArg(ptr)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice
	// result: (DUFFZERO [4 * (64 - s/16)] ptr mem)
	// Mid-size aligned zero: jump into the Duff's-device zeroing routine.
	// The AuxInt is the byte offset of the entry point (4 bytes per 16-byte
	// chunk skipped).
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFZERO)
		v.AuxInt = 4 * (64 - s/16)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] ptr mem)
	// cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice)
	// result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem)
	// Large (or no-Duff) aligned zero: emit the generic zeroing loop; the
	// second operand is a pointer to the last 16-byte chunk.
	for {
		s := v.AuxInt
		mem := v.Args[1]
		ptr := v.Args[0]
		if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) {
			break
		}
		v.reset(OpARM64LoweredZero)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type)
		v0.AuxInt = s - 16
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpZeroExt16to32_0 lowers ZeroExt16to32 to MOVHUreg
// (unsigned halfword move). Always matches.
// Machine generated from gen/ARM64.rules; do not edit by hand.
func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt16to64_0 lowers ZeroExt16to64 to MOVHUreg.
// Always matches.
func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt32to64_0 lowers ZeroExt32to64 to MOVWUreg
// (unsigned word move). Always matches.
func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool {
	// match: (ZeroExt32to64 x)
	// result: (MOVWUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVWUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to16_0 lowers ZeroExt8to16 to MOVBUreg
// (unsigned byte move). Always matches.
func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to32_0 lowers ZeroExt8to32 to MOVBUreg.
// Always matches.
func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueARM64_OpZeroExt8to64_0 lowers ZeroExt8to64 to MOVBUreg.
// Always matches.
func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool {
	// match: (ZeroExt8to64 x)
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteBlockARM64 applies the ARM64 block rewrite rules to b, dispatching
// on b.Kind and rewriting the control value (e.g. folding a compare-with-zero
// of a flag-setting candidate into the flag-setting form). Reports whether a
// rewrite fired. Machine generated from gen/ARM64.rules; do not edit by hand.
func rewriteBlockARM64(b *Block) bool {
	switch b.Kind {
	case BlockARM64EQ:
		// match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no)
		// cond: x.Uses == 1
		// result: (EQ (TSTWconst [c] y) yes no)
		for b.Controls[0].Op == OpARM64CMPWconst {
			v_0 := b.Controls[0]
			if v_0.AuxInt != 0 {
				break
			}
			x := v_0.Args[0]
			if x.Op != OpARM64ANDconst {
				break
			}
			c := x.AuxInt
			y := x.Args[0]
			if !(x.Uses == 1) {
				break
			}
b.Reset(BlockARM64EQ) 33335 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 33336 v0.AuxInt = c 33337 v0.AddArg(y) 33338 b.AddControl(v0) 33339 return true 33340 } 33341 // match: (EQ (CMPconst [0] z:(AND x y)) yes no) 33342 // cond: z.Uses == 1 33343 // result: (EQ (TST x y) yes no) 33344 for b.Controls[0].Op == OpARM64CMPconst { 33345 v_0 := b.Controls[0] 33346 if v_0.AuxInt != 0 { 33347 break 33348 } 33349 z := v_0.Args[0] 33350 if z.Op != OpARM64AND { 33351 break 33352 } 33353 y := z.Args[1] 33354 x := z.Args[0] 33355 if !(z.Uses == 1) { 33356 break 33357 } 33358 b.Reset(BlockARM64EQ) 33359 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 33360 v0.AddArg(x) 33361 v0.AddArg(y) 33362 b.AddControl(v0) 33363 return true 33364 } 33365 // match: (EQ (CMPWconst [0] z:(AND x y)) yes no) 33366 // cond: z.Uses == 1 33367 // result: (EQ (TSTW x y) yes no) 33368 for b.Controls[0].Op == OpARM64CMPWconst { 33369 v_0 := b.Controls[0] 33370 if v_0.AuxInt != 0 { 33371 break 33372 } 33373 z := v_0.Args[0] 33374 if z.Op != OpARM64AND { 33375 break 33376 } 33377 y := z.Args[1] 33378 x := z.Args[0] 33379 if !(z.Uses == 1) { 33380 break 33381 } 33382 b.Reset(BlockARM64EQ) 33383 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 33384 v0.AddArg(x) 33385 v0.AddArg(y) 33386 b.AddControl(v0) 33387 return true 33388 } 33389 // match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no) 33390 // cond: x.Uses == 1 33391 // result: (EQ (TSTconst [c] y) yes no) 33392 for b.Controls[0].Op == OpARM64CMPconst { 33393 v_0 := b.Controls[0] 33394 if v_0.AuxInt != 0 { 33395 break 33396 } 33397 x := v_0.Args[0] 33398 if x.Op != OpARM64ANDconst { 33399 break 33400 } 33401 c := x.AuxInt 33402 y := x.Args[0] 33403 if !(x.Uses == 1) { 33404 break 33405 } 33406 b.Reset(BlockARM64EQ) 33407 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 33408 v0.AuxInt = c 33409 v0.AddArg(y) 33410 b.AddControl(v0) 33411 return true 33412 } 33413 // match: (EQ (CMPconst [0] x:(ADDconst [c] 
y)) yes no) 33414 // cond: x.Uses == 1 33415 // result: (EQ (CMNconst [c] y) yes no) 33416 for b.Controls[0].Op == OpARM64CMPconst { 33417 v_0 := b.Controls[0] 33418 if v_0.AuxInt != 0 { 33419 break 33420 } 33421 x := v_0.Args[0] 33422 if x.Op != OpARM64ADDconst { 33423 break 33424 } 33425 c := x.AuxInt 33426 y := x.Args[0] 33427 if !(x.Uses == 1) { 33428 break 33429 } 33430 b.Reset(BlockARM64EQ) 33431 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 33432 v0.AuxInt = c 33433 v0.AddArg(y) 33434 b.AddControl(v0) 33435 return true 33436 } 33437 // match: (EQ (CMPWconst [0] x:(ADDconst [c] y)) yes no) 33438 // cond: x.Uses == 1 33439 // result: (EQ (CMNWconst [c] y) yes no) 33440 for b.Controls[0].Op == OpARM64CMPWconst { 33441 v_0 := b.Controls[0] 33442 if v_0.AuxInt != 0 { 33443 break 33444 } 33445 x := v_0.Args[0] 33446 if x.Op != OpARM64ADDconst { 33447 break 33448 } 33449 c := x.AuxInt 33450 y := x.Args[0] 33451 if !(x.Uses == 1) { 33452 break 33453 } 33454 b.Reset(BlockARM64EQ) 33455 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 33456 v0.AuxInt = c 33457 v0.AddArg(y) 33458 b.AddControl(v0) 33459 return true 33460 } 33461 // match: (EQ (CMPconst [0] z:(ADD x y)) yes no) 33462 // cond: z.Uses == 1 33463 // result: (EQ (CMN x y) yes no) 33464 for b.Controls[0].Op == OpARM64CMPconst { 33465 v_0 := b.Controls[0] 33466 if v_0.AuxInt != 0 { 33467 break 33468 } 33469 z := v_0.Args[0] 33470 if z.Op != OpARM64ADD { 33471 break 33472 } 33473 y := z.Args[1] 33474 x := z.Args[0] 33475 if !(z.Uses == 1) { 33476 break 33477 } 33478 b.Reset(BlockARM64EQ) 33479 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 33480 v0.AddArg(x) 33481 v0.AddArg(y) 33482 b.AddControl(v0) 33483 return true 33484 } 33485 // match: (EQ (CMPWconst [0] z:(ADD x y)) yes no) 33486 // cond: z.Uses == 1 33487 // result: (EQ (CMNW x y) yes no) 33488 for b.Controls[0].Op == OpARM64CMPWconst { 33489 v_0 := b.Controls[0] 33490 if v_0.AuxInt != 0 { 33491 break 33492 } 33493 z 
:= v_0.Args[0] 33494 if z.Op != OpARM64ADD { 33495 break 33496 } 33497 y := z.Args[1] 33498 x := z.Args[0] 33499 if !(z.Uses == 1) { 33500 break 33501 } 33502 b.Reset(BlockARM64EQ) 33503 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 33504 v0.AddArg(x) 33505 v0.AddArg(y) 33506 b.AddControl(v0) 33507 return true 33508 } 33509 // match: (EQ (CMP x z:(NEG y)) yes no) 33510 // cond: z.Uses == 1 33511 // result: (EQ (CMN x y) yes no) 33512 for b.Controls[0].Op == OpARM64CMP { 33513 v_0 := b.Controls[0] 33514 _ = v_0.Args[1] 33515 x := v_0.Args[0] 33516 z := v_0.Args[1] 33517 if z.Op != OpARM64NEG { 33518 break 33519 } 33520 y := z.Args[0] 33521 if !(z.Uses == 1) { 33522 break 33523 } 33524 b.Reset(BlockARM64EQ) 33525 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 33526 v0.AddArg(x) 33527 v0.AddArg(y) 33528 b.AddControl(v0) 33529 return true 33530 } 33531 // match: (EQ (CMPW x z:(NEG y)) yes no) 33532 // cond: z.Uses == 1 33533 // result: (EQ (CMNW x y) yes no) 33534 for b.Controls[0].Op == OpARM64CMPW { 33535 v_0 := b.Controls[0] 33536 _ = v_0.Args[1] 33537 x := v_0.Args[0] 33538 z := v_0.Args[1] 33539 if z.Op != OpARM64NEG { 33540 break 33541 } 33542 y := z.Args[0] 33543 if !(z.Uses == 1) { 33544 break 33545 } 33546 b.Reset(BlockARM64EQ) 33547 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 33548 v0.AddArg(x) 33549 v0.AddArg(y) 33550 b.AddControl(v0) 33551 return true 33552 } 33553 // match: (EQ (CMPconst [0] x) yes no) 33554 // result: (Z x yes no) 33555 for b.Controls[0].Op == OpARM64CMPconst { 33556 v_0 := b.Controls[0] 33557 if v_0.AuxInt != 0 { 33558 break 33559 } 33560 x := v_0.Args[0] 33561 b.Reset(BlockARM64Z) 33562 b.AddControl(x) 33563 return true 33564 } 33565 // match: (EQ (CMPWconst [0] x) yes no) 33566 // result: (ZW x yes no) 33567 for b.Controls[0].Op == OpARM64CMPWconst { 33568 v_0 := b.Controls[0] 33569 if v_0.AuxInt != 0 { 33570 break 33571 } 33572 x := v_0.Args[0] 33573 b.Reset(BlockARM64ZW) 33574 b.AddControl(x) 33575 
return true 33576 } 33577 // match: (EQ (CMPconst [0] z:(MADD a x y)) yes no) 33578 // cond: z.Uses==1 33579 // result: (EQ (CMN a (MUL <x.Type> x y)) yes no) 33580 for b.Controls[0].Op == OpARM64CMPconst { 33581 v_0 := b.Controls[0] 33582 if v_0.AuxInt != 0 { 33583 break 33584 } 33585 z := v_0.Args[0] 33586 if z.Op != OpARM64MADD { 33587 break 33588 } 33589 y := z.Args[2] 33590 a := z.Args[0] 33591 x := z.Args[1] 33592 if !(z.Uses == 1) { 33593 break 33594 } 33595 b.Reset(BlockARM64EQ) 33596 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 33597 v0.AddArg(a) 33598 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 33599 v1.AddArg(x) 33600 v1.AddArg(y) 33601 v0.AddArg(v1) 33602 b.AddControl(v0) 33603 return true 33604 } 33605 // match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no) 33606 // cond: z.Uses==1 33607 // result: (EQ (CMP a (MUL <x.Type> x y)) yes no) 33608 for b.Controls[0].Op == OpARM64CMPconst { 33609 v_0 := b.Controls[0] 33610 if v_0.AuxInt != 0 { 33611 break 33612 } 33613 z := v_0.Args[0] 33614 if z.Op != OpARM64MSUB { 33615 break 33616 } 33617 y := z.Args[2] 33618 a := z.Args[0] 33619 x := z.Args[1] 33620 if !(z.Uses == 1) { 33621 break 33622 } 33623 b.Reset(BlockARM64EQ) 33624 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 33625 v0.AddArg(a) 33626 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 33627 v1.AddArg(x) 33628 v1.AddArg(y) 33629 v0.AddArg(v1) 33630 b.AddControl(v0) 33631 return true 33632 } 33633 // match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no) 33634 // cond: z.Uses==1 33635 // result: (EQ (CMNW a (MULW <x.Type> x y)) yes no) 33636 for b.Controls[0].Op == OpARM64CMPWconst { 33637 v_0 := b.Controls[0] 33638 if v_0.AuxInt != 0 { 33639 break 33640 } 33641 z := v_0.Args[0] 33642 if z.Op != OpARM64MADDW { 33643 break 33644 } 33645 y := z.Args[2] 33646 a := z.Args[0] 33647 x := z.Args[1] 33648 if !(z.Uses == 1) { 33649 break 33650 } 33651 b.Reset(BlockARM64EQ) 33652 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 33653 
v0.AddArg(a) 33654 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 33655 v1.AddArg(x) 33656 v1.AddArg(y) 33657 v0.AddArg(v1) 33658 b.AddControl(v0) 33659 return true 33660 } 33661 // match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no) 33662 // cond: z.Uses==1 33663 // result: (EQ (CMPW a (MULW <x.Type> x y)) yes no) 33664 for b.Controls[0].Op == OpARM64CMPWconst { 33665 v_0 := b.Controls[0] 33666 if v_0.AuxInt != 0 { 33667 break 33668 } 33669 z := v_0.Args[0] 33670 if z.Op != OpARM64MSUBW { 33671 break 33672 } 33673 y := z.Args[2] 33674 a := z.Args[0] 33675 x := z.Args[1] 33676 if !(z.Uses == 1) { 33677 break 33678 } 33679 b.Reset(BlockARM64EQ) 33680 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 33681 v0.AddArg(a) 33682 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 33683 v1.AddArg(x) 33684 v1.AddArg(y) 33685 v0.AddArg(v1) 33686 b.AddControl(v0) 33687 return true 33688 } 33689 // match: (EQ (TSTconst [c] x) yes no) 33690 // cond: oneBit(c) 33691 // result: (TBZ {ntz(c)} x yes no) 33692 for b.Controls[0].Op == OpARM64TSTconst { 33693 v_0 := b.Controls[0] 33694 c := v_0.AuxInt 33695 x := v_0.Args[0] 33696 if !(oneBit(c)) { 33697 break 33698 } 33699 b.Reset(BlockARM64TBZ) 33700 b.AddControl(x) 33701 b.Aux = ntz(c) 33702 return true 33703 } 33704 // match: (EQ (TSTWconst [c] x) yes no) 33705 // cond: oneBit(int64(uint32(c))) 33706 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 33707 for b.Controls[0].Op == OpARM64TSTWconst { 33708 v_0 := b.Controls[0] 33709 c := v_0.AuxInt 33710 x := v_0.Args[0] 33711 if !(oneBit(int64(uint32(c)))) { 33712 break 33713 } 33714 b.Reset(BlockARM64TBZ) 33715 b.AddControl(x) 33716 b.Aux = ntz(int64(uint32(c))) 33717 return true 33718 } 33719 // match: (EQ (FlagEQ) yes no) 33720 // result: (First yes no) 33721 for b.Controls[0].Op == OpARM64FlagEQ { 33722 b.Reset(BlockFirst) 33723 return true 33724 } 33725 // match: (EQ (FlagLT_ULT) yes no) 33726 // result: (First no yes) 33727 for b.Controls[0].Op == OpARM64FlagLT_ULT { 33728 
b.Reset(BlockFirst) 33729 b.swapSuccessors() 33730 return true 33731 } 33732 // match: (EQ (FlagLT_UGT) yes no) 33733 // result: (First no yes) 33734 for b.Controls[0].Op == OpARM64FlagLT_UGT { 33735 b.Reset(BlockFirst) 33736 b.swapSuccessors() 33737 return true 33738 } 33739 // match: (EQ (FlagGT_ULT) yes no) 33740 // result: (First no yes) 33741 for b.Controls[0].Op == OpARM64FlagGT_ULT { 33742 b.Reset(BlockFirst) 33743 b.swapSuccessors() 33744 return true 33745 } 33746 // match: (EQ (FlagGT_UGT) yes no) 33747 // result: (First no yes) 33748 for b.Controls[0].Op == OpARM64FlagGT_UGT { 33749 b.Reset(BlockFirst) 33750 b.swapSuccessors() 33751 return true 33752 } 33753 // match: (EQ (InvertFlags cmp) yes no) 33754 // result: (EQ cmp yes no) 33755 for b.Controls[0].Op == OpARM64InvertFlags { 33756 v_0 := b.Controls[0] 33757 cmp := v_0.Args[0] 33758 b.Reset(BlockARM64EQ) 33759 b.AddControl(cmp) 33760 return true 33761 } 33762 case BlockARM64FGE: 33763 // match: (FGE (InvertFlags cmp) yes no) 33764 // result: (FLE cmp yes no) 33765 for b.Controls[0].Op == OpARM64InvertFlags { 33766 v_0 := b.Controls[0] 33767 cmp := v_0.Args[0] 33768 b.Reset(BlockARM64FLE) 33769 b.AddControl(cmp) 33770 return true 33771 } 33772 case BlockARM64FGT: 33773 // match: (FGT (InvertFlags cmp) yes no) 33774 // result: (FLT cmp yes no) 33775 for b.Controls[0].Op == OpARM64InvertFlags { 33776 v_0 := b.Controls[0] 33777 cmp := v_0.Args[0] 33778 b.Reset(BlockARM64FLT) 33779 b.AddControl(cmp) 33780 return true 33781 } 33782 case BlockARM64FLE: 33783 // match: (FLE (InvertFlags cmp) yes no) 33784 // result: (FGE cmp yes no) 33785 for b.Controls[0].Op == OpARM64InvertFlags { 33786 v_0 := b.Controls[0] 33787 cmp := v_0.Args[0] 33788 b.Reset(BlockARM64FGE) 33789 b.AddControl(cmp) 33790 return true 33791 } 33792 case BlockARM64FLT: 33793 // match: (FLT (InvertFlags cmp) yes no) 33794 // result: (FGT cmp yes no) 33795 for b.Controls[0].Op == OpARM64InvertFlags { 33796 v_0 := b.Controls[0] 33797 cmp := 
v_0.Args[0] 33798 b.Reset(BlockARM64FGT) 33799 b.AddControl(cmp) 33800 return true 33801 } 33802 case BlockARM64GE: 33803 // match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 33804 // cond: x.Uses == 1 33805 // result: (GE (TSTWconst [c] y) yes no) 33806 for b.Controls[0].Op == OpARM64CMPWconst { 33807 v_0 := b.Controls[0] 33808 if v_0.AuxInt != 0 { 33809 break 33810 } 33811 x := v_0.Args[0] 33812 if x.Op != OpARM64ANDconst { 33813 break 33814 } 33815 c := x.AuxInt 33816 y := x.Args[0] 33817 if !(x.Uses == 1) { 33818 break 33819 } 33820 b.Reset(BlockARM64GE) 33821 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 33822 v0.AuxInt = c 33823 v0.AddArg(y) 33824 b.AddControl(v0) 33825 return true 33826 } 33827 // match: (GE (CMPconst [0] z:(AND x y)) yes no) 33828 // cond: z.Uses == 1 33829 // result: (GE (TST x y) yes no) 33830 for b.Controls[0].Op == OpARM64CMPconst { 33831 v_0 := b.Controls[0] 33832 if v_0.AuxInt != 0 { 33833 break 33834 } 33835 z := v_0.Args[0] 33836 if z.Op != OpARM64AND { 33837 break 33838 } 33839 y := z.Args[1] 33840 x := z.Args[0] 33841 if !(z.Uses == 1) { 33842 break 33843 } 33844 b.Reset(BlockARM64GE) 33845 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 33846 v0.AddArg(x) 33847 v0.AddArg(y) 33848 b.AddControl(v0) 33849 return true 33850 } 33851 // match: (GE (CMPWconst [0] z:(AND x y)) yes no) 33852 // cond: z.Uses == 1 33853 // result: (GE (TSTW x y) yes no) 33854 for b.Controls[0].Op == OpARM64CMPWconst { 33855 v_0 := b.Controls[0] 33856 if v_0.AuxInt != 0 { 33857 break 33858 } 33859 z := v_0.Args[0] 33860 if z.Op != OpARM64AND { 33861 break 33862 } 33863 y := z.Args[1] 33864 x := z.Args[0] 33865 if !(z.Uses == 1) { 33866 break 33867 } 33868 b.Reset(BlockARM64GE) 33869 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 33870 v0.AddArg(x) 33871 v0.AddArg(y) 33872 b.AddControl(v0) 33873 return true 33874 } 33875 // match: (GE (CMPconst [0] x:(ANDconst [c] y)) yes no) 33876 // cond: x.Uses == 1 33877 // result: 
(GE (TSTconst [c] y) yes no) 33878 for b.Controls[0].Op == OpARM64CMPconst { 33879 v_0 := b.Controls[0] 33880 if v_0.AuxInt != 0 { 33881 break 33882 } 33883 x := v_0.Args[0] 33884 if x.Op != OpARM64ANDconst { 33885 break 33886 } 33887 c := x.AuxInt 33888 y := x.Args[0] 33889 if !(x.Uses == 1) { 33890 break 33891 } 33892 b.Reset(BlockARM64GE) 33893 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 33894 v0.AuxInt = c 33895 v0.AddArg(y) 33896 b.AddControl(v0) 33897 return true 33898 } 33899 // match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no) 33900 // cond: x.Uses == 1 33901 // result: (GE (CMNconst [c] y) yes no) 33902 for b.Controls[0].Op == OpARM64CMPconst { 33903 v_0 := b.Controls[0] 33904 if v_0.AuxInt != 0 { 33905 break 33906 } 33907 x := v_0.Args[0] 33908 if x.Op != OpARM64ADDconst { 33909 break 33910 } 33911 c := x.AuxInt 33912 y := x.Args[0] 33913 if !(x.Uses == 1) { 33914 break 33915 } 33916 b.Reset(BlockARM64GE) 33917 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 33918 v0.AuxInt = c 33919 v0.AddArg(y) 33920 b.AddControl(v0) 33921 return true 33922 } 33923 // match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 33924 // cond: x.Uses == 1 33925 // result: (GE (CMNWconst [c] y) yes no) 33926 for b.Controls[0].Op == OpARM64CMPWconst { 33927 v_0 := b.Controls[0] 33928 if v_0.AuxInt != 0 { 33929 break 33930 } 33931 x := v_0.Args[0] 33932 if x.Op != OpARM64ADDconst { 33933 break 33934 } 33935 c := x.AuxInt 33936 y := x.Args[0] 33937 if !(x.Uses == 1) { 33938 break 33939 } 33940 b.Reset(BlockARM64GE) 33941 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 33942 v0.AuxInt = c 33943 v0.AddArg(y) 33944 b.AddControl(v0) 33945 return true 33946 } 33947 // match: (GE (CMPconst [0] z:(ADD x y)) yes no) 33948 // cond: z.Uses == 1 33949 // result: (GE (CMN x y) yes no) 33950 for b.Controls[0].Op == OpARM64CMPconst { 33951 v_0 := b.Controls[0] 33952 if v_0.AuxInt != 0 { 33953 break 33954 } 33955 z := v_0.Args[0] 33956 if z.Op != 
OpARM64ADD { 33957 break 33958 } 33959 y := z.Args[1] 33960 x := z.Args[0] 33961 if !(z.Uses == 1) { 33962 break 33963 } 33964 b.Reset(BlockARM64GE) 33965 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 33966 v0.AddArg(x) 33967 v0.AddArg(y) 33968 b.AddControl(v0) 33969 return true 33970 } 33971 // match: (GE (CMPWconst [0] z:(ADD x y)) yes no) 33972 // cond: z.Uses == 1 33973 // result: (GE (CMNW x y) yes no) 33974 for b.Controls[0].Op == OpARM64CMPWconst { 33975 v_0 := b.Controls[0] 33976 if v_0.AuxInt != 0 { 33977 break 33978 } 33979 z := v_0.Args[0] 33980 if z.Op != OpARM64ADD { 33981 break 33982 } 33983 y := z.Args[1] 33984 x := z.Args[0] 33985 if !(z.Uses == 1) { 33986 break 33987 } 33988 b.Reset(BlockARM64GE) 33989 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 33990 v0.AddArg(x) 33991 v0.AddArg(y) 33992 b.AddControl(v0) 33993 return true 33994 } 33995 // match: (GE (CMP x z:(NEG y)) yes no) 33996 // cond: z.Uses == 1 33997 // result: (GE (CMN x y) yes no) 33998 for b.Controls[0].Op == OpARM64CMP { 33999 v_0 := b.Controls[0] 34000 _ = v_0.Args[1] 34001 x := v_0.Args[0] 34002 z := v_0.Args[1] 34003 if z.Op != OpARM64NEG { 34004 break 34005 } 34006 y := z.Args[0] 34007 if !(z.Uses == 1) { 34008 break 34009 } 34010 b.Reset(BlockARM64GE) 34011 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34012 v0.AddArg(x) 34013 v0.AddArg(y) 34014 b.AddControl(v0) 34015 return true 34016 } 34017 // match: (GE (CMPW x z:(NEG y)) yes no) 34018 // cond: z.Uses == 1 34019 // result: (GE (CMNW x y) yes no) 34020 for b.Controls[0].Op == OpARM64CMPW { 34021 v_0 := b.Controls[0] 34022 _ = v_0.Args[1] 34023 x := v_0.Args[0] 34024 z := v_0.Args[1] 34025 if z.Op != OpARM64NEG { 34026 break 34027 } 34028 y := z.Args[0] 34029 if !(z.Uses == 1) { 34030 break 34031 } 34032 b.Reset(BlockARM64GE) 34033 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34034 v0.AddArg(x) 34035 v0.AddArg(y) 34036 b.AddControl(v0) 34037 return true 34038 } 34039 // match: (GE 
(CMPconst [0] z:(MADD a x y)) yes no) 34040 // cond: z.Uses==1 34041 // result: (GE (CMN a (MUL <x.Type> x y)) yes no) 34042 for b.Controls[0].Op == OpARM64CMPconst { 34043 v_0 := b.Controls[0] 34044 if v_0.AuxInt != 0 { 34045 break 34046 } 34047 z := v_0.Args[0] 34048 if z.Op != OpARM64MADD { 34049 break 34050 } 34051 y := z.Args[2] 34052 a := z.Args[0] 34053 x := z.Args[1] 34054 if !(z.Uses == 1) { 34055 break 34056 } 34057 b.Reset(BlockARM64GE) 34058 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34059 v0.AddArg(a) 34060 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 34061 v1.AddArg(x) 34062 v1.AddArg(y) 34063 v0.AddArg(v1) 34064 b.AddControl(v0) 34065 return true 34066 } 34067 // match: (GE (CMPconst [0] z:(MSUB a x y)) yes no) 34068 // cond: z.Uses==1 34069 // result: (GE (CMP a (MUL <x.Type> x y)) yes no) 34070 for b.Controls[0].Op == OpARM64CMPconst { 34071 v_0 := b.Controls[0] 34072 if v_0.AuxInt != 0 { 34073 break 34074 } 34075 z := v_0.Args[0] 34076 if z.Op != OpARM64MSUB { 34077 break 34078 } 34079 y := z.Args[2] 34080 a := z.Args[0] 34081 x := z.Args[1] 34082 if !(z.Uses == 1) { 34083 break 34084 } 34085 b.Reset(BlockARM64GE) 34086 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 34087 v0.AddArg(a) 34088 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 34089 v1.AddArg(x) 34090 v1.AddArg(y) 34091 v0.AddArg(v1) 34092 b.AddControl(v0) 34093 return true 34094 } 34095 // match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no) 34096 // cond: z.Uses==1 34097 // result: (GE (CMNW a (MULW <x.Type> x y)) yes no) 34098 for b.Controls[0].Op == OpARM64CMPWconst { 34099 v_0 := b.Controls[0] 34100 if v_0.AuxInt != 0 { 34101 break 34102 } 34103 z := v_0.Args[0] 34104 if z.Op != OpARM64MADDW { 34105 break 34106 } 34107 y := z.Args[2] 34108 a := z.Args[0] 34109 x := z.Args[1] 34110 if !(z.Uses == 1) { 34111 break 34112 } 34113 b.Reset(BlockARM64GE) 34114 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34115 v0.AddArg(a) 34116 v1 := b.NewValue0(v_0.Pos, 
OpARM64MULW, x.Type) 34117 v1.AddArg(x) 34118 v1.AddArg(y) 34119 v0.AddArg(v1) 34120 b.AddControl(v0) 34121 return true 34122 } 34123 // match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no) 34124 // cond: z.Uses==1 34125 // result: (GE (CMPW a (MULW <x.Type> x y)) yes no) 34126 for b.Controls[0].Op == OpARM64CMPWconst { 34127 v_0 := b.Controls[0] 34128 if v_0.AuxInt != 0 { 34129 break 34130 } 34131 z := v_0.Args[0] 34132 if z.Op != OpARM64MSUBW { 34133 break 34134 } 34135 y := z.Args[2] 34136 a := z.Args[0] 34137 x := z.Args[1] 34138 if !(z.Uses == 1) { 34139 break 34140 } 34141 b.Reset(BlockARM64GE) 34142 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 34143 v0.AddArg(a) 34144 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 34145 v1.AddArg(x) 34146 v1.AddArg(y) 34147 v0.AddArg(v1) 34148 b.AddControl(v0) 34149 return true 34150 } 34151 // match: (GE (CMPWconst [0] x) yes no) 34152 // result: (TBZ {int64(31)} x yes no) 34153 for b.Controls[0].Op == OpARM64CMPWconst { 34154 v_0 := b.Controls[0] 34155 if v_0.AuxInt != 0 { 34156 break 34157 } 34158 x := v_0.Args[0] 34159 b.Reset(BlockARM64TBZ) 34160 b.AddControl(x) 34161 b.Aux = int64(31) 34162 return true 34163 } 34164 // match: (GE (CMPconst [0] x) yes no) 34165 // result: (TBZ {int64(63)} x yes no) 34166 for b.Controls[0].Op == OpARM64CMPconst { 34167 v_0 := b.Controls[0] 34168 if v_0.AuxInt != 0 { 34169 break 34170 } 34171 x := v_0.Args[0] 34172 b.Reset(BlockARM64TBZ) 34173 b.AddControl(x) 34174 b.Aux = int64(63) 34175 return true 34176 } 34177 // match: (GE (FlagEQ) yes no) 34178 // result: (First yes no) 34179 for b.Controls[0].Op == OpARM64FlagEQ { 34180 b.Reset(BlockFirst) 34181 return true 34182 } 34183 // match: (GE (FlagLT_ULT) yes no) 34184 // result: (First no yes) 34185 for b.Controls[0].Op == OpARM64FlagLT_ULT { 34186 b.Reset(BlockFirst) 34187 b.swapSuccessors() 34188 return true 34189 } 34190 // match: (GE (FlagLT_UGT) yes no) 34191 // result: (First no yes) 34192 for b.Controls[0].Op == 
OpARM64FlagLT_UGT { 34193 b.Reset(BlockFirst) 34194 b.swapSuccessors() 34195 return true 34196 } 34197 // match: (GE (FlagGT_ULT) yes no) 34198 // result: (First yes no) 34199 for b.Controls[0].Op == OpARM64FlagGT_ULT { 34200 b.Reset(BlockFirst) 34201 return true 34202 } 34203 // match: (GE (FlagGT_UGT) yes no) 34204 // result: (First yes no) 34205 for b.Controls[0].Op == OpARM64FlagGT_UGT { 34206 b.Reset(BlockFirst) 34207 return true 34208 } 34209 // match: (GE (InvertFlags cmp) yes no) 34210 // result: (LE cmp yes no) 34211 for b.Controls[0].Op == OpARM64InvertFlags { 34212 v_0 := b.Controls[0] 34213 cmp := v_0.Args[0] 34214 b.Reset(BlockARM64LE) 34215 b.AddControl(cmp) 34216 return true 34217 } 34218 case BlockARM64GT: 34219 // match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 34220 // cond: x.Uses == 1 34221 // result: (GT (TSTWconst [c] y) yes no) 34222 for b.Controls[0].Op == OpARM64CMPWconst { 34223 v_0 := b.Controls[0] 34224 if v_0.AuxInt != 0 { 34225 break 34226 } 34227 x := v_0.Args[0] 34228 if x.Op != OpARM64ANDconst { 34229 break 34230 } 34231 c := x.AuxInt 34232 y := x.Args[0] 34233 if !(x.Uses == 1) { 34234 break 34235 } 34236 b.Reset(BlockARM64GT) 34237 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 34238 v0.AuxInt = c 34239 v0.AddArg(y) 34240 b.AddControl(v0) 34241 return true 34242 } 34243 // match: (GT (CMPconst [0] z:(AND x y)) yes no) 34244 // cond: z.Uses == 1 34245 // result: (GT (TST x y) yes no) 34246 for b.Controls[0].Op == OpARM64CMPconst { 34247 v_0 := b.Controls[0] 34248 if v_0.AuxInt != 0 { 34249 break 34250 } 34251 z := v_0.Args[0] 34252 if z.Op != OpARM64AND { 34253 break 34254 } 34255 y := z.Args[1] 34256 x := z.Args[0] 34257 if !(z.Uses == 1) { 34258 break 34259 } 34260 b.Reset(BlockARM64GT) 34261 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 34262 v0.AddArg(x) 34263 v0.AddArg(y) 34264 b.AddControl(v0) 34265 return true 34266 } 34267 // match: (GT (CMPWconst [0] z:(AND x y)) yes no) 34268 // cond: z.Uses 
== 1 34269 // result: (GT (TSTW x y) yes no) 34270 for b.Controls[0].Op == OpARM64CMPWconst { 34271 v_0 := b.Controls[0] 34272 if v_0.AuxInt != 0 { 34273 break 34274 } 34275 z := v_0.Args[0] 34276 if z.Op != OpARM64AND { 34277 break 34278 } 34279 y := z.Args[1] 34280 x := z.Args[0] 34281 if !(z.Uses == 1) { 34282 break 34283 } 34284 b.Reset(BlockARM64GT) 34285 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 34286 v0.AddArg(x) 34287 v0.AddArg(y) 34288 b.AddControl(v0) 34289 return true 34290 } 34291 // match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no) 34292 // cond: x.Uses == 1 34293 // result: (GT (TSTconst [c] y) yes no) 34294 for b.Controls[0].Op == OpARM64CMPconst { 34295 v_0 := b.Controls[0] 34296 if v_0.AuxInt != 0 { 34297 break 34298 } 34299 x := v_0.Args[0] 34300 if x.Op != OpARM64ANDconst { 34301 break 34302 } 34303 c := x.AuxInt 34304 y := x.Args[0] 34305 if !(x.Uses == 1) { 34306 break 34307 } 34308 b.Reset(BlockARM64GT) 34309 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 34310 v0.AuxInt = c 34311 v0.AddArg(y) 34312 b.AddControl(v0) 34313 return true 34314 } 34315 // match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no) 34316 // cond: x.Uses == 1 34317 // result: (GT (CMNconst [c] y) yes no) 34318 for b.Controls[0].Op == OpARM64CMPconst { 34319 v_0 := b.Controls[0] 34320 if v_0.AuxInt != 0 { 34321 break 34322 } 34323 x := v_0.Args[0] 34324 if x.Op != OpARM64ADDconst { 34325 break 34326 } 34327 c := x.AuxInt 34328 y := x.Args[0] 34329 if !(x.Uses == 1) { 34330 break 34331 } 34332 b.Reset(BlockARM64GT) 34333 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 34334 v0.AuxInt = c 34335 v0.AddArg(y) 34336 b.AddControl(v0) 34337 return true 34338 } 34339 // match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 34340 // cond: x.Uses == 1 34341 // result: (GT (CMNWconst [c] y) yes no) 34342 for b.Controls[0].Op == OpARM64CMPWconst { 34343 v_0 := b.Controls[0] 34344 if v_0.AuxInt != 0 { 34345 break 34346 } 34347 x := v_0.Args[0] 
34348 if x.Op != OpARM64ADDconst { 34349 break 34350 } 34351 c := x.AuxInt 34352 y := x.Args[0] 34353 if !(x.Uses == 1) { 34354 break 34355 } 34356 b.Reset(BlockARM64GT) 34357 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 34358 v0.AuxInt = c 34359 v0.AddArg(y) 34360 b.AddControl(v0) 34361 return true 34362 } 34363 // match: (GT (CMPconst [0] z:(ADD x y)) yes no) 34364 // cond: z.Uses == 1 34365 // result: (GT (CMN x y) yes no) 34366 for b.Controls[0].Op == OpARM64CMPconst { 34367 v_0 := b.Controls[0] 34368 if v_0.AuxInt != 0 { 34369 break 34370 } 34371 z := v_0.Args[0] 34372 if z.Op != OpARM64ADD { 34373 break 34374 } 34375 y := z.Args[1] 34376 x := z.Args[0] 34377 if !(z.Uses == 1) { 34378 break 34379 } 34380 b.Reset(BlockARM64GT) 34381 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34382 v0.AddArg(x) 34383 v0.AddArg(y) 34384 b.AddControl(v0) 34385 return true 34386 } 34387 // match: (GT (CMPWconst [0] z:(ADD x y)) yes no) 34388 // cond: z.Uses == 1 34389 // result: (GT (CMNW x y) yes no) 34390 for b.Controls[0].Op == OpARM64CMPWconst { 34391 v_0 := b.Controls[0] 34392 if v_0.AuxInt != 0 { 34393 break 34394 } 34395 z := v_0.Args[0] 34396 if z.Op != OpARM64ADD { 34397 break 34398 } 34399 y := z.Args[1] 34400 x := z.Args[0] 34401 if !(z.Uses == 1) { 34402 break 34403 } 34404 b.Reset(BlockARM64GT) 34405 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34406 v0.AddArg(x) 34407 v0.AddArg(y) 34408 b.AddControl(v0) 34409 return true 34410 } 34411 // match: (GT (CMP x z:(NEG y)) yes no) 34412 // cond: z.Uses == 1 34413 // result: (GT (CMN x y) yes no) 34414 for b.Controls[0].Op == OpARM64CMP { 34415 v_0 := b.Controls[0] 34416 _ = v_0.Args[1] 34417 x := v_0.Args[0] 34418 z := v_0.Args[1] 34419 if z.Op != OpARM64NEG { 34420 break 34421 } 34422 y := z.Args[0] 34423 if !(z.Uses == 1) { 34424 break 34425 } 34426 b.Reset(BlockARM64GT) 34427 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34428 v0.AddArg(x) 34429 v0.AddArg(y) 34430 
b.AddControl(v0) 34431 return true 34432 } 34433 // match: (GT (CMPW x z:(NEG y)) yes no) 34434 // cond: z.Uses == 1 34435 // result: (GT (CMNW x y) yes no) 34436 for b.Controls[0].Op == OpARM64CMPW { 34437 v_0 := b.Controls[0] 34438 _ = v_0.Args[1] 34439 x := v_0.Args[0] 34440 z := v_0.Args[1] 34441 if z.Op != OpARM64NEG { 34442 break 34443 } 34444 y := z.Args[0] 34445 if !(z.Uses == 1) { 34446 break 34447 } 34448 b.Reset(BlockARM64GT) 34449 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34450 v0.AddArg(x) 34451 v0.AddArg(y) 34452 b.AddControl(v0) 34453 return true 34454 } 34455 // match: (GT (CMPconst [0] z:(MADD a x y)) yes no) 34456 // cond: z.Uses==1 34457 // result: (GT (CMN a (MUL <x.Type> x y)) yes no) 34458 for b.Controls[0].Op == OpARM64CMPconst { 34459 v_0 := b.Controls[0] 34460 if v_0.AuxInt != 0 { 34461 break 34462 } 34463 z := v_0.Args[0] 34464 if z.Op != OpARM64MADD { 34465 break 34466 } 34467 y := z.Args[2] 34468 a := z.Args[0] 34469 x := z.Args[1] 34470 if !(z.Uses == 1) { 34471 break 34472 } 34473 b.Reset(BlockARM64GT) 34474 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34475 v0.AddArg(a) 34476 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 34477 v1.AddArg(x) 34478 v1.AddArg(y) 34479 v0.AddArg(v1) 34480 b.AddControl(v0) 34481 return true 34482 } 34483 // match: (GT (CMPconst [0] z:(MSUB a x y)) yes no) 34484 // cond: z.Uses==1 34485 // result: (GT (CMP a (MUL <x.Type> x y)) yes no) 34486 for b.Controls[0].Op == OpARM64CMPconst { 34487 v_0 := b.Controls[0] 34488 if v_0.AuxInt != 0 { 34489 break 34490 } 34491 z := v_0.Args[0] 34492 if z.Op != OpARM64MSUB { 34493 break 34494 } 34495 y := z.Args[2] 34496 a := z.Args[0] 34497 x := z.Args[1] 34498 if !(z.Uses == 1) { 34499 break 34500 } 34501 b.Reset(BlockARM64GT) 34502 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 34503 v0.AddArg(a) 34504 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 34505 v1.AddArg(x) 34506 v1.AddArg(y) 34507 v0.AddArg(v1) 34508 b.AddControl(v0) 34509 
return true 34510 } 34511 // match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no) 34512 // cond: z.Uses==1 34513 // result: (GT (CMNW a (MULW <x.Type> x y)) yes no) 34514 for b.Controls[0].Op == OpARM64CMPWconst { 34515 v_0 := b.Controls[0] 34516 if v_0.AuxInt != 0 { 34517 break 34518 } 34519 z := v_0.Args[0] 34520 if z.Op != OpARM64MADDW { 34521 break 34522 } 34523 y := z.Args[2] 34524 a := z.Args[0] 34525 x := z.Args[1] 34526 if !(z.Uses == 1) { 34527 break 34528 } 34529 b.Reset(BlockARM64GT) 34530 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34531 v0.AddArg(a) 34532 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 34533 v1.AddArg(x) 34534 v1.AddArg(y) 34535 v0.AddArg(v1) 34536 b.AddControl(v0) 34537 return true 34538 } 34539 // match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no) 34540 // cond: z.Uses==1 34541 // result: (GT (CMPW a (MULW <x.Type> x y)) yes no) 34542 for b.Controls[0].Op == OpARM64CMPWconst { 34543 v_0 := b.Controls[0] 34544 if v_0.AuxInt != 0 { 34545 break 34546 } 34547 z := v_0.Args[0] 34548 if z.Op != OpARM64MSUBW { 34549 break 34550 } 34551 y := z.Args[2] 34552 a := z.Args[0] 34553 x := z.Args[1] 34554 if !(z.Uses == 1) { 34555 break 34556 } 34557 b.Reset(BlockARM64GT) 34558 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 34559 v0.AddArg(a) 34560 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 34561 v1.AddArg(x) 34562 v1.AddArg(y) 34563 v0.AddArg(v1) 34564 b.AddControl(v0) 34565 return true 34566 } 34567 // match: (GT (FlagEQ) yes no) 34568 // result: (First no yes) 34569 for b.Controls[0].Op == OpARM64FlagEQ { 34570 b.Reset(BlockFirst) 34571 b.swapSuccessors() 34572 return true 34573 } 34574 // match: (GT (FlagLT_ULT) yes no) 34575 // result: (First no yes) 34576 for b.Controls[0].Op == OpARM64FlagLT_ULT { 34577 b.Reset(BlockFirst) 34578 b.swapSuccessors() 34579 return true 34580 } 34581 // match: (GT (FlagLT_UGT) yes no) 34582 // result: (First no yes) 34583 for b.Controls[0].Op == OpARM64FlagLT_UGT { 34584 
b.Reset(BlockFirst) 34585 b.swapSuccessors() 34586 return true 34587 } 34588 // match: (GT (FlagGT_ULT) yes no) 34589 // result: (First yes no) 34590 for b.Controls[0].Op == OpARM64FlagGT_ULT { 34591 b.Reset(BlockFirst) 34592 return true 34593 } 34594 // match: (GT (FlagGT_UGT) yes no) 34595 // result: (First yes no) 34596 for b.Controls[0].Op == OpARM64FlagGT_UGT { 34597 b.Reset(BlockFirst) 34598 return true 34599 } 34600 // match: (GT (InvertFlags cmp) yes no) 34601 // result: (LT cmp yes no) 34602 for b.Controls[0].Op == OpARM64InvertFlags { 34603 v_0 := b.Controls[0] 34604 cmp := v_0.Args[0] 34605 b.Reset(BlockARM64LT) 34606 b.AddControl(cmp) 34607 return true 34608 } 34609 case BlockIf: 34610 // match: (If (Equal cc) yes no) 34611 // result: (EQ cc yes no) 34612 for b.Controls[0].Op == OpARM64Equal { 34613 v_0 := b.Controls[0] 34614 cc := v_0.Args[0] 34615 b.Reset(BlockARM64EQ) 34616 b.AddControl(cc) 34617 return true 34618 } 34619 // match: (If (NotEqual cc) yes no) 34620 // result: (NE cc yes no) 34621 for b.Controls[0].Op == OpARM64NotEqual { 34622 v_0 := b.Controls[0] 34623 cc := v_0.Args[0] 34624 b.Reset(BlockARM64NE) 34625 b.AddControl(cc) 34626 return true 34627 } 34628 // match: (If (LessThan cc) yes no) 34629 // result: (LT cc yes no) 34630 for b.Controls[0].Op == OpARM64LessThan { 34631 v_0 := b.Controls[0] 34632 cc := v_0.Args[0] 34633 b.Reset(BlockARM64LT) 34634 b.AddControl(cc) 34635 return true 34636 } 34637 // match: (If (LessThanU cc) yes no) 34638 // result: (ULT cc yes no) 34639 for b.Controls[0].Op == OpARM64LessThanU { 34640 v_0 := b.Controls[0] 34641 cc := v_0.Args[0] 34642 b.Reset(BlockARM64ULT) 34643 b.AddControl(cc) 34644 return true 34645 } 34646 // match: (If (LessEqual cc) yes no) 34647 // result: (LE cc yes no) 34648 for b.Controls[0].Op == OpARM64LessEqual { 34649 v_0 := b.Controls[0] 34650 cc := v_0.Args[0] 34651 b.Reset(BlockARM64LE) 34652 b.AddControl(cc) 34653 return true 34654 } 34655 // match: (If (LessEqualU cc) yes no) 
34656 // result: (ULE cc yes no) 34657 for b.Controls[0].Op == OpARM64LessEqualU { 34658 v_0 := b.Controls[0] 34659 cc := v_0.Args[0] 34660 b.Reset(BlockARM64ULE) 34661 b.AddControl(cc) 34662 return true 34663 } 34664 // match: (If (GreaterThan cc) yes no) 34665 // result: (GT cc yes no) 34666 for b.Controls[0].Op == OpARM64GreaterThan { 34667 v_0 := b.Controls[0] 34668 cc := v_0.Args[0] 34669 b.Reset(BlockARM64GT) 34670 b.AddControl(cc) 34671 return true 34672 } 34673 // match: (If (GreaterThanU cc) yes no) 34674 // result: (UGT cc yes no) 34675 for b.Controls[0].Op == OpARM64GreaterThanU { 34676 v_0 := b.Controls[0] 34677 cc := v_0.Args[0] 34678 b.Reset(BlockARM64UGT) 34679 b.AddControl(cc) 34680 return true 34681 } 34682 // match: (If (GreaterEqual cc) yes no) 34683 // result: (GE cc yes no) 34684 for b.Controls[0].Op == OpARM64GreaterEqual { 34685 v_0 := b.Controls[0] 34686 cc := v_0.Args[0] 34687 b.Reset(BlockARM64GE) 34688 b.AddControl(cc) 34689 return true 34690 } 34691 // match: (If (GreaterEqualU cc) yes no) 34692 // result: (UGE cc yes no) 34693 for b.Controls[0].Op == OpARM64GreaterEqualU { 34694 v_0 := b.Controls[0] 34695 cc := v_0.Args[0] 34696 b.Reset(BlockARM64UGE) 34697 b.AddControl(cc) 34698 return true 34699 } 34700 // match: (If (LessThanF cc) yes no) 34701 // result: (FLT cc yes no) 34702 for b.Controls[0].Op == OpARM64LessThanF { 34703 v_0 := b.Controls[0] 34704 cc := v_0.Args[0] 34705 b.Reset(BlockARM64FLT) 34706 b.AddControl(cc) 34707 return true 34708 } 34709 // match: (If (LessEqualF cc) yes no) 34710 // result: (FLE cc yes no) 34711 for b.Controls[0].Op == OpARM64LessEqualF { 34712 v_0 := b.Controls[0] 34713 cc := v_0.Args[0] 34714 b.Reset(BlockARM64FLE) 34715 b.AddControl(cc) 34716 return true 34717 } 34718 // match: (If (GreaterThanF cc) yes no) 34719 // result: (FGT cc yes no) 34720 for b.Controls[0].Op == OpARM64GreaterThanF { 34721 v_0 := b.Controls[0] 34722 cc := v_0.Args[0] 34723 b.Reset(BlockARM64FGT) 34724 b.AddControl(cc) 34725 
return true 34726 } 34727 // match: (If (GreaterEqualF cc) yes no) 34728 // result: (FGE cc yes no) 34729 for b.Controls[0].Op == OpARM64GreaterEqualF { 34730 v_0 := b.Controls[0] 34731 cc := v_0.Args[0] 34732 b.Reset(BlockARM64FGE) 34733 b.AddControl(cc) 34734 return true 34735 } 34736 // match: (If cond yes no) 34737 // result: (NZ cond yes no) 34738 for { 34739 cond := b.Controls[0] 34740 b.Reset(BlockARM64NZ) 34741 b.AddControl(cond) 34742 return true 34743 } 34744 case BlockARM64LE: 34745 // match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 34746 // cond: x.Uses == 1 34747 // result: (LE (TSTWconst [c] y) yes no) 34748 for b.Controls[0].Op == OpARM64CMPWconst { 34749 v_0 := b.Controls[0] 34750 if v_0.AuxInt != 0 { 34751 break 34752 } 34753 x := v_0.Args[0] 34754 if x.Op != OpARM64ANDconst { 34755 break 34756 } 34757 c := x.AuxInt 34758 y := x.Args[0] 34759 if !(x.Uses == 1) { 34760 break 34761 } 34762 b.Reset(BlockARM64LE) 34763 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 34764 v0.AuxInt = c 34765 v0.AddArg(y) 34766 b.AddControl(v0) 34767 return true 34768 } 34769 // match: (LE (CMPconst [0] z:(AND x y)) yes no) 34770 // cond: z.Uses == 1 34771 // result: (LE (TST x y) yes no) 34772 for b.Controls[0].Op == OpARM64CMPconst { 34773 v_0 := b.Controls[0] 34774 if v_0.AuxInt != 0 { 34775 break 34776 } 34777 z := v_0.Args[0] 34778 if z.Op != OpARM64AND { 34779 break 34780 } 34781 y := z.Args[1] 34782 x := z.Args[0] 34783 if !(z.Uses == 1) { 34784 break 34785 } 34786 b.Reset(BlockARM64LE) 34787 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 34788 v0.AddArg(x) 34789 v0.AddArg(y) 34790 b.AddControl(v0) 34791 return true 34792 } 34793 // match: (LE (CMPWconst [0] z:(AND x y)) yes no) 34794 // cond: z.Uses == 1 34795 // result: (LE (TSTW x y) yes no) 34796 for b.Controls[0].Op == OpARM64CMPWconst { 34797 v_0 := b.Controls[0] 34798 if v_0.AuxInt != 0 { 34799 break 34800 } 34801 z := v_0.Args[0] 34802 if z.Op != OpARM64AND { 34803 break 34804 
} 34805 y := z.Args[1] 34806 x := z.Args[0] 34807 if !(z.Uses == 1) { 34808 break 34809 } 34810 b.Reset(BlockARM64LE) 34811 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 34812 v0.AddArg(x) 34813 v0.AddArg(y) 34814 b.AddControl(v0) 34815 return true 34816 } 34817 // match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no) 34818 // cond: x.Uses == 1 34819 // result: (LE (TSTconst [c] y) yes no) 34820 for b.Controls[0].Op == OpARM64CMPconst { 34821 v_0 := b.Controls[0] 34822 if v_0.AuxInt != 0 { 34823 break 34824 } 34825 x := v_0.Args[0] 34826 if x.Op != OpARM64ANDconst { 34827 break 34828 } 34829 c := x.AuxInt 34830 y := x.Args[0] 34831 if !(x.Uses == 1) { 34832 break 34833 } 34834 b.Reset(BlockARM64LE) 34835 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 34836 v0.AuxInt = c 34837 v0.AddArg(y) 34838 b.AddControl(v0) 34839 return true 34840 } 34841 // match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no) 34842 // cond: x.Uses == 1 34843 // result: (LE (CMNconst [c] y) yes no) 34844 for b.Controls[0].Op == OpARM64CMPconst { 34845 v_0 := b.Controls[0] 34846 if v_0.AuxInt != 0 { 34847 break 34848 } 34849 x := v_0.Args[0] 34850 if x.Op != OpARM64ADDconst { 34851 break 34852 } 34853 c := x.AuxInt 34854 y := x.Args[0] 34855 if !(x.Uses == 1) { 34856 break 34857 } 34858 b.Reset(BlockARM64LE) 34859 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 34860 v0.AuxInt = c 34861 v0.AddArg(y) 34862 b.AddControl(v0) 34863 return true 34864 } 34865 // match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 34866 // cond: x.Uses == 1 34867 // result: (LE (CMNWconst [c] y) yes no) 34868 for b.Controls[0].Op == OpARM64CMPWconst { 34869 v_0 := b.Controls[0] 34870 if v_0.AuxInt != 0 { 34871 break 34872 } 34873 x := v_0.Args[0] 34874 if x.Op != OpARM64ADDconst { 34875 break 34876 } 34877 c := x.AuxInt 34878 y := x.Args[0] 34879 if !(x.Uses == 1) { 34880 break 34881 } 34882 b.Reset(BlockARM64LE) 34883 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 
34884 v0.AuxInt = c 34885 v0.AddArg(y) 34886 b.AddControl(v0) 34887 return true 34888 } 34889 // match: (LE (CMPconst [0] z:(ADD x y)) yes no) 34890 // cond: z.Uses == 1 34891 // result: (LE (CMN x y) yes no) 34892 for b.Controls[0].Op == OpARM64CMPconst { 34893 v_0 := b.Controls[0] 34894 if v_0.AuxInt != 0 { 34895 break 34896 } 34897 z := v_0.Args[0] 34898 if z.Op != OpARM64ADD { 34899 break 34900 } 34901 y := z.Args[1] 34902 x := z.Args[0] 34903 if !(z.Uses == 1) { 34904 break 34905 } 34906 b.Reset(BlockARM64LE) 34907 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34908 v0.AddArg(x) 34909 v0.AddArg(y) 34910 b.AddControl(v0) 34911 return true 34912 } 34913 // match: (LE (CMPWconst [0] z:(ADD x y)) yes no) 34914 // cond: z.Uses == 1 34915 // result: (LE (CMNW x y) yes no) 34916 for b.Controls[0].Op == OpARM64CMPWconst { 34917 v_0 := b.Controls[0] 34918 if v_0.AuxInt != 0 { 34919 break 34920 } 34921 z := v_0.Args[0] 34922 if z.Op != OpARM64ADD { 34923 break 34924 } 34925 y := z.Args[1] 34926 x := z.Args[0] 34927 if !(z.Uses == 1) { 34928 break 34929 } 34930 b.Reset(BlockARM64LE) 34931 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34932 v0.AddArg(x) 34933 v0.AddArg(y) 34934 b.AddControl(v0) 34935 return true 34936 } 34937 // match: (LE (CMP x z:(NEG y)) yes no) 34938 // cond: z.Uses == 1 34939 // result: (LE (CMN x y) yes no) 34940 for b.Controls[0].Op == OpARM64CMP { 34941 v_0 := b.Controls[0] 34942 _ = v_0.Args[1] 34943 x := v_0.Args[0] 34944 z := v_0.Args[1] 34945 if z.Op != OpARM64NEG { 34946 break 34947 } 34948 y := z.Args[0] 34949 if !(z.Uses == 1) { 34950 break 34951 } 34952 b.Reset(BlockARM64LE) 34953 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 34954 v0.AddArg(x) 34955 v0.AddArg(y) 34956 b.AddControl(v0) 34957 return true 34958 } 34959 // match: (LE (CMPW x z:(NEG y)) yes no) 34960 // cond: z.Uses == 1 34961 // result: (LE (CMNW x y) yes no) 34962 for b.Controls[0].Op == OpARM64CMPW { 34963 v_0 := b.Controls[0] 34964 _ = 
v_0.Args[1] 34965 x := v_0.Args[0] 34966 z := v_0.Args[1] 34967 if z.Op != OpARM64NEG { 34968 break 34969 } 34970 y := z.Args[0] 34971 if !(z.Uses == 1) { 34972 break 34973 } 34974 b.Reset(BlockARM64LE) 34975 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 34976 v0.AddArg(x) 34977 v0.AddArg(y) 34978 b.AddControl(v0) 34979 return true 34980 } 34981 // match: (LE (CMPconst [0] z:(MADD a x y)) yes no) 34982 // cond: z.Uses==1 34983 // result: (LE (CMN a (MUL <x.Type> x y)) yes no) 34984 for b.Controls[0].Op == OpARM64CMPconst { 34985 v_0 := b.Controls[0] 34986 if v_0.AuxInt != 0 { 34987 break 34988 } 34989 z := v_0.Args[0] 34990 if z.Op != OpARM64MADD { 34991 break 34992 } 34993 y := z.Args[2] 34994 a := z.Args[0] 34995 x := z.Args[1] 34996 if !(z.Uses == 1) { 34997 break 34998 } 34999 b.Reset(BlockARM64LE) 35000 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35001 v0.AddArg(a) 35002 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35003 v1.AddArg(x) 35004 v1.AddArg(y) 35005 v0.AddArg(v1) 35006 b.AddControl(v0) 35007 return true 35008 } 35009 // match: (LE (CMPconst [0] z:(MSUB a x y)) yes no) 35010 // cond: z.Uses==1 35011 // result: (LE (CMP a (MUL <x.Type> x y)) yes no) 35012 for b.Controls[0].Op == OpARM64CMPconst { 35013 v_0 := b.Controls[0] 35014 if v_0.AuxInt != 0 { 35015 break 35016 } 35017 z := v_0.Args[0] 35018 if z.Op != OpARM64MSUB { 35019 break 35020 } 35021 y := z.Args[2] 35022 a := z.Args[0] 35023 x := z.Args[1] 35024 if !(z.Uses == 1) { 35025 break 35026 } 35027 b.Reset(BlockARM64LE) 35028 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 35029 v0.AddArg(a) 35030 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35031 v1.AddArg(x) 35032 v1.AddArg(y) 35033 v0.AddArg(v1) 35034 b.AddControl(v0) 35035 return true 35036 } 35037 // match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no) 35038 // cond: z.Uses==1 35039 // result: (LE (CMNW a (MULW <x.Type> x y)) yes no) 35040 for b.Controls[0].Op == OpARM64CMPWconst { 35041 v_0 := b.Controls[0] 
35042 if v_0.AuxInt != 0 { 35043 break 35044 } 35045 z := v_0.Args[0] 35046 if z.Op != OpARM64MADDW { 35047 break 35048 } 35049 y := z.Args[2] 35050 a := z.Args[0] 35051 x := z.Args[1] 35052 if !(z.Uses == 1) { 35053 break 35054 } 35055 b.Reset(BlockARM64LE) 35056 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35057 v0.AddArg(a) 35058 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35059 v1.AddArg(x) 35060 v1.AddArg(y) 35061 v0.AddArg(v1) 35062 b.AddControl(v0) 35063 return true 35064 } 35065 // match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no) 35066 // cond: z.Uses==1 35067 // result: (LE (CMPW a (MULW <x.Type> x y)) yes no) 35068 for b.Controls[0].Op == OpARM64CMPWconst { 35069 v_0 := b.Controls[0] 35070 if v_0.AuxInt != 0 { 35071 break 35072 } 35073 z := v_0.Args[0] 35074 if z.Op != OpARM64MSUBW { 35075 break 35076 } 35077 y := z.Args[2] 35078 a := z.Args[0] 35079 x := z.Args[1] 35080 if !(z.Uses == 1) { 35081 break 35082 } 35083 b.Reset(BlockARM64LE) 35084 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 35085 v0.AddArg(a) 35086 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35087 v1.AddArg(x) 35088 v1.AddArg(y) 35089 v0.AddArg(v1) 35090 b.AddControl(v0) 35091 return true 35092 } 35093 // match: (LE (FlagEQ) yes no) 35094 // result: (First yes no) 35095 for b.Controls[0].Op == OpARM64FlagEQ { 35096 b.Reset(BlockFirst) 35097 return true 35098 } 35099 // match: (LE (FlagLT_ULT) yes no) 35100 // result: (First yes no) 35101 for b.Controls[0].Op == OpARM64FlagLT_ULT { 35102 b.Reset(BlockFirst) 35103 return true 35104 } 35105 // match: (LE (FlagLT_UGT) yes no) 35106 // result: (First yes no) 35107 for b.Controls[0].Op == OpARM64FlagLT_UGT { 35108 b.Reset(BlockFirst) 35109 return true 35110 } 35111 // match: (LE (FlagGT_ULT) yes no) 35112 // result: (First no yes) 35113 for b.Controls[0].Op == OpARM64FlagGT_ULT { 35114 b.Reset(BlockFirst) 35115 b.swapSuccessors() 35116 return true 35117 } 35118 // match: (LE (FlagGT_UGT) yes no) 35119 // result: 
(First no yes) 35120 for b.Controls[0].Op == OpARM64FlagGT_UGT { 35121 b.Reset(BlockFirst) 35122 b.swapSuccessors() 35123 return true 35124 } 35125 // match: (LE (InvertFlags cmp) yes no) 35126 // result: (GE cmp yes no) 35127 for b.Controls[0].Op == OpARM64InvertFlags { 35128 v_0 := b.Controls[0] 35129 cmp := v_0.Args[0] 35130 b.Reset(BlockARM64GE) 35131 b.AddControl(cmp) 35132 return true 35133 } 35134 case BlockARM64LT: 35135 // match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 35136 // cond: x.Uses == 1 35137 // result: (LT (TSTWconst [c] y) yes no) 35138 for b.Controls[0].Op == OpARM64CMPWconst { 35139 v_0 := b.Controls[0] 35140 if v_0.AuxInt != 0 { 35141 break 35142 } 35143 x := v_0.Args[0] 35144 if x.Op != OpARM64ANDconst { 35145 break 35146 } 35147 c := x.AuxInt 35148 y := x.Args[0] 35149 if !(x.Uses == 1) { 35150 break 35151 } 35152 b.Reset(BlockARM64LT) 35153 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 35154 v0.AuxInt = c 35155 v0.AddArg(y) 35156 b.AddControl(v0) 35157 return true 35158 } 35159 // match: (LT (CMPconst [0] z:(AND x y)) yes no) 35160 // cond: z.Uses == 1 35161 // result: (LT (TST x y) yes no) 35162 for b.Controls[0].Op == OpARM64CMPconst { 35163 v_0 := b.Controls[0] 35164 if v_0.AuxInt != 0 { 35165 break 35166 } 35167 z := v_0.Args[0] 35168 if z.Op != OpARM64AND { 35169 break 35170 } 35171 y := z.Args[1] 35172 x := z.Args[0] 35173 if !(z.Uses == 1) { 35174 break 35175 } 35176 b.Reset(BlockARM64LT) 35177 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 35178 v0.AddArg(x) 35179 v0.AddArg(y) 35180 b.AddControl(v0) 35181 return true 35182 } 35183 // match: (LT (CMPWconst [0] z:(AND x y)) yes no) 35184 // cond: z.Uses == 1 35185 // result: (LT (TSTW x y) yes no) 35186 for b.Controls[0].Op == OpARM64CMPWconst { 35187 v_0 := b.Controls[0] 35188 if v_0.AuxInt != 0 { 35189 break 35190 } 35191 z := v_0.Args[0] 35192 if z.Op != OpARM64AND { 35193 break 35194 } 35195 y := z.Args[1] 35196 x := z.Args[0] 35197 if !(z.Uses == 
1) { 35198 break 35199 } 35200 b.Reset(BlockARM64LT) 35201 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 35202 v0.AddArg(x) 35203 v0.AddArg(y) 35204 b.AddControl(v0) 35205 return true 35206 } 35207 // match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no) 35208 // cond: x.Uses == 1 35209 // result: (LT (TSTconst [c] y) yes no) 35210 for b.Controls[0].Op == OpARM64CMPconst { 35211 v_0 := b.Controls[0] 35212 if v_0.AuxInt != 0 { 35213 break 35214 } 35215 x := v_0.Args[0] 35216 if x.Op != OpARM64ANDconst { 35217 break 35218 } 35219 c := x.AuxInt 35220 y := x.Args[0] 35221 if !(x.Uses == 1) { 35222 break 35223 } 35224 b.Reset(BlockARM64LT) 35225 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 35226 v0.AuxInt = c 35227 v0.AddArg(y) 35228 b.AddControl(v0) 35229 return true 35230 } 35231 // match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no) 35232 // cond: x.Uses == 1 35233 // result: (LT (CMNconst [c] y) yes no) 35234 for b.Controls[0].Op == OpARM64CMPconst { 35235 v_0 := b.Controls[0] 35236 if v_0.AuxInt != 0 { 35237 break 35238 } 35239 x := v_0.Args[0] 35240 if x.Op != OpARM64ADDconst { 35241 break 35242 } 35243 c := x.AuxInt 35244 y := x.Args[0] 35245 if !(x.Uses == 1) { 35246 break 35247 } 35248 b.Reset(BlockARM64LT) 35249 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 35250 v0.AuxInt = c 35251 v0.AddArg(y) 35252 b.AddControl(v0) 35253 return true 35254 } 35255 // match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 35256 // cond: x.Uses == 1 35257 // result: (LT (CMNWconst [c] y) yes no) 35258 for b.Controls[0].Op == OpARM64CMPWconst { 35259 v_0 := b.Controls[0] 35260 if v_0.AuxInt != 0 { 35261 break 35262 } 35263 x := v_0.Args[0] 35264 if x.Op != OpARM64ADDconst { 35265 break 35266 } 35267 c := x.AuxInt 35268 y := x.Args[0] 35269 if !(x.Uses == 1) { 35270 break 35271 } 35272 b.Reset(BlockARM64LT) 35273 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 35274 v0.AuxInt = c 35275 v0.AddArg(y) 35276 b.AddControl(v0) 
35277 return true 35278 } 35279 // match: (LT (CMPconst [0] z:(ADD x y)) yes no) 35280 // cond: z.Uses == 1 35281 // result: (LT (CMN x y) yes no) 35282 for b.Controls[0].Op == OpARM64CMPconst { 35283 v_0 := b.Controls[0] 35284 if v_0.AuxInt != 0 { 35285 break 35286 } 35287 z := v_0.Args[0] 35288 if z.Op != OpARM64ADD { 35289 break 35290 } 35291 y := z.Args[1] 35292 x := z.Args[0] 35293 if !(z.Uses == 1) { 35294 break 35295 } 35296 b.Reset(BlockARM64LT) 35297 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35298 v0.AddArg(x) 35299 v0.AddArg(y) 35300 b.AddControl(v0) 35301 return true 35302 } 35303 // match: (LT (CMPWconst [0] z:(ADD x y)) yes no) 35304 // cond: z.Uses == 1 35305 // result: (LT (CMNW x y) yes no) 35306 for b.Controls[0].Op == OpARM64CMPWconst { 35307 v_0 := b.Controls[0] 35308 if v_0.AuxInt != 0 { 35309 break 35310 } 35311 z := v_0.Args[0] 35312 if z.Op != OpARM64ADD { 35313 break 35314 } 35315 y := z.Args[1] 35316 x := z.Args[0] 35317 if !(z.Uses == 1) { 35318 break 35319 } 35320 b.Reset(BlockARM64LT) 35321 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35322 v0.AddArg(x) 35323 v0.AddArg(y) 35324 b.AddControl(v0) 35325 return true 35326 } 35327 // match: (LT (CMP x z:(NEG y)) yes no) 35328 // cond: z.Uses == 1 35329 // result: (LT (CMN x y) yes no) 35330 for b.Controls[0].Op == OpARM64CMP { 35331 v_0 := b.Controls[0] 35332 _ = v_0.Args[1] 35333 x := v_0.Args[0] 35334 z := v_0.Args[1] 35335 if z.Op != OpARM64NEG { 35336 break 35337 } 35338 y := z.Args[0] 35339 if !(z.Uses == 1) { 35340 break 35341 } 35342 b.Reset(BlockARM64LT) 35343 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35344 v0.AddArg(x) 35345 v0.AddArg(y) 35346 b.AddControl(v0) 35347 return true 35348 } 35349 // match: (LT (CMPW x z:(NEG y)) yes no) 35350 // cond: z.Uses == 1 35351 // result: (LT (CMNW x y) yes no) 35352 for b.Controls[0].Op == OpARM64CMPW { 35353 v_0 := b.Controls[0] 35354 _ = v_0.Args[1] 35355 x := v_0.Args[0] 35356 z := v_0.Args[1] 35357 if 
z.Op != OpARM64NEG { 35358 break 35359 } 35360 y := z.Args[0] 35361 if !(z.Uses == 1) { 35362 break 35363 } 35364 b.Reset(BlockARM64LT) 35365 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35366 v0.AddArg(x) 35367 v0.AddArg(y) 35368 b.AddControl(v0) 35369 return true 35370 } 35371 // match: (LT (CMPconst [0] z:(MADD a x y)) yes no) 35372 // cond: z.Uses==1 35373 // result: (LT (CMN a (MUL <x.Type> x y)) yes no) 35374 for b.Controls[0].Op == OpARM64CMPconst { 35375 v_0 := b.Controls[0] 35376 if v_0.AuxInt != 0 { 35377 break 35378 } 35379 z := v_0.Args[0] 35380 if z.Op != OpARM64MADD { 35381 break 35382 } 35383 y := z.Args[2] 35384 a := z.Args[0] 35385 x := z.Args[1] 35386 if !(z.Uses == 1) { 35387 break 35388 } 35389 b.Reset(BlockARM64LT) 35390 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35391 v0.AddArg(a) 35392 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35393 v1.AddArg(x) 35394 v1.AddArg(y) 35395 v0.AddArg(v1) 35396 b.AddControl(v0) 35397 return true 35398 } 35399 // match: (LT (CMPconst [0] z:(MSUB a x y)) yes no) 35400 // cond: z.Uses==1 35401 // result: (LT (CMP a (MUL <x.Type> x y)) yes no) 35402 for b.Controls[0].Op == OpARM64CMPconst { 35403 v_0 := b.Controls[0] 35404 if v_0.AuxInt != 0 { 35405 break 35406 } 35407 z := v_0.Args[0] 35408 if z.Op != OpARM64MSUB { 35409 break 35410 } 35411 y := z.Args[2] 35412 a := z.Args[0] 35413 x := z.Args[1] 35414 if !(z.Uses == 1) { 35415 break 35416 } 35417 b.Reset(BlockARM64LT) 35418 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 35419 v0.AddArg(a) 35420 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35421 v1.AddArg(x) 35422 v1.AddArg(y) 35423 v0.AddArg(v1) 35424 b.AddControl(v0) 35425 return true 35426 } 35427 // match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no) 35428 // cond: z.Uses==1 35429 // result: (LT (CMNW a (MULW <x.Type> x y)) yes no) 35430 for b.Controls[0].Op == OpARM64CMPWconst { 35431 v_0 := b.Controls[0] 35432 if v_0.AuxInt != 0 { 35433 break 35434 } 35435 z := 
v_0.Args[0] 35436 if z.Op != OpARM64MADDW { 35437 break 35438 } 35439 y := z.Args[2] 35440 a := z.Args[0] 35441 x := z.Args[1] 35442 if !(z.Uses == 1) { 35443 break 35444 } 35445 b.Reset(BlockARM64LT) 35446 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35447 v0.AddArg(a) 35448 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35449 v1.AddArg(x) 35450 v1.AddArg(y) 35451 v0.AddArg(v1) 35452 b.AddControl(v0) 35453 return true 35454 } 35455 // match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no) 35456 // cond: z.Uses==1 35457 // result: (LT (CMPW a (MULW <x.Type> x y)) yes no) 35458 for b.Controls[0].Op == OpARM64CMPWconst { 35459 v_0 := b.Controls[0] 35460 if v_0.AuxInt != 0 { 35461 break 35462 } 35463 z := v_0.Args[0] 35464 if z.Op != OpARM64MSUBW { 35465 break 35466 } 35467 y := z.Args[2] 35468 a := z.Args[0] 35469 x := z.Args[1] 35470 if !(z.Uses == 1) { 35471 break 35472 } 35473 b.Reset(BlockARM64LT) 35474 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 35475 v0.AddArg(a) 35476 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35477 v1.AddArg(x) 35478 v1.AddArg(y) 35479 v0.AddArg(v1) 35480 b.AddControl(v0) 35481 return true 35482 } 35483 // match: (LT (CMPWconst [0] x) yes no) 35484 // result: (TBNZ {int64(31)} x yes no) 35485 for b.Controls[0].Op == OpARM64CMPWconst { 35486 v_0 := b.Controls[0] 35487 if v_0.AuxInt != 0 { 35488 break 35489 } 35490 x := v_0.Args[0] 35491 b.Reset(BlockARM64TBNZ) 35492 b.AddControl(x) 35493 b.Aux = int64(31) 35494 return true 35495 } 35496 // match: (LT (CMPconst [0] x) yes no) 35497 // result: (TBNZ {int64(63)} x yes no) 35498 for b.Controls[0].Op == OpARM64CMPconst { 35499 v_0 := b.Controls[0] 35500 if v_0.AuxInt != 0 { 35501 break 35502 } 35503 x := v_0.Args[0] 35504 b.Reset(BlockARM64TBNZ) 35505 b.AddControl(x) 35506 b.Aux = int64(63) 35507 return true 35508 } 35509 // match: (LT (FlagEQ) yes no) 35510 // result: (First no yes) 35511 for b.Controls[0].Op == OpARM64FlagEQ { 35512 b.Reset(BlockFirst) 35513 
b.swapSuccessors() 35514 return true 35515 } 35516 // match: (LT (FlagLT_ULT) yes no) 35517 // result: (First yes no) 35518 for b.Controls[0].Op == OpARM64FlagLT_ULT { 35519 b.Reset(BlockFirst) 35520 return true 35521 } 35522 // match: (LT (FlagLT_UGT) yes no) 35523 // result: (First yes no) 35524 for b.Controls[0].Op == OpARM64FlagLT_UGT { 35525 b.Reset(BlockFirst) 35526 return true 35527 } 35528 // match: (LT (FlagGT_ULT) yes no) 35529 // result: (First no yes) 35530 for b.Controls[0].Op == OpARM64FlagGT_ULT { 35531 b.Reset(BlockFirst) 35532 b.swapSuccessors() 35533 return true 35534 } 35535 // match: (LT (FlagGT_UGT) yes no) 35536 // result: (First no yes) 35537 for b.Controls[0].Op == OpARM64FlagGT_UGT { 35538 b.Reset(BlockFirst) 35539 b.swapSuccessors() 35540 return true 35541 } 35542 // match: (LT (InvertFlags cmp) yes no) 35543 // result: (GT cmp yes no) 35544 for b.Controls[0].Op == OpARM64InvertFlags { 35545 v_0 := b.Controls[0] 35546 cmp := v_0.Args[0] 35547 b.Reset(BlockARM64GT) 35548 b.AddControl(cmp) 35549 return true 35550 } 35551 case BlockARM64NE: 35552 // match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 35553 // cond: x.Uses == 1 35554 // result: (NE (TSTWconst [c] y) yes no) 35555 for b.Controls[0].Op == OpARM64CMPWconst { 35556 v_0 := b.Controls[0] 35557 if v_0.AuxInt != 0 { 35558 break 35559 } 35560 x := v_0.Args[0] 35561 if x.Op != OpARM64ANDconst { 35562 break 35563 } 35564 c := x.AuxInt 35565 y := x.Args[0] 35566 if !(x.Uses == 1) { 35567 break 35568 } 35569 b.Reset(BlockARM64NE) 35570 v0 := b.NewValue0(v_0.Pos, OpARM64TSTWconst, types.TypeFlags) 35571 v0.AuxInt = c 35572 v0.AddArg(y) 35573 b.AddControl(v0) 35574 return true 35575 } 35576 // match: (NE (CMPconst [0] z:(AND x y)) yes no) 35577 // cond: z.Uses == 1 35578 // result: (NE (TST x y) yes no) 35579 for b.Controls[0].Op == OpARM64CMPconst { 35580 v_0 := b.Controls[0] 35581 if v_0.AuxInt != 0 { 35582 break 35583 } 35584 z := v_0.Args[0] 35585 if z.Op != OpARM64AND { 35586 break 
35587 } 35588 y := z.Args[1] 35589 x := z.Args[0] 35590 if !(z.Uses == 1) { 35591 break 35592 } 35593 b.Reset(BlockARM64NE) 35594 v0 := b.NewValue0(v_0.Pos, OpARM64TST, types.TypeFlags) 35595 v0.AddArg(x) 35596 v0.AddArg(y) 35597 b.AddControl(v0) 35598 return true 35599 } 35600 // match: (NE (CMPWconst [0] z:(AND x y)) yes no) 35601 // cond: z.Uses == 1 35602 // result: (NE (TSTW x y) yes no) 35603 for b.Controls[0].Op == OpARM64CMPWconst { 35604 v_0 := b.Controls[0] 35605 if v_0.AuxInt != 0 { 35606 break 35607 } 35608 z := v_0.Args[0] 35609 if z.Op != OpARM64AND { 35610 break 35611 } 35612 y := z.Args[1] 35613 x := z.Args[0] 35614 if !(z.Uses == 1) { 35615 break 35616 } 35617 b.Reset(BlockARM64NE) 35618 v0 := b.NewValue0(v_0.Pos, OpARM64TSTW, types.TypeFlags) 35619 v0.AddArg(x) 35620 v0.AddArg(y) 35621 b.AddControl(v0) 35622 return true 35623 } 35624 // match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no) 35625 // cond: x.Uses == 1 35626 // result: (NE (TSTconst [c] y) yes no) 35627 for b.Controls[0].Op == OpARM64CMPconst { 35628 v_0 := b.Controls[0] 35629 if v_0.AuxInt != 0 { 35630 break 35631 } 35632 x := v_0.Args[0] 35633 if x.Op != OpARM64ANDconst { 35634 break 35635 } 35636 c := x.AuxInt 35637 y := x.Args[0] 35638 if !(x.Uses == 1) { 35639 break 35640 } 35641 b.Reset(BlockARM64NE) 35642 v0 := b.NewValue0(v_0.Pos, OpARM64TSTconst, types.TypeFlags) 35643 v0.AuxInt = c 35644 v0.AddArg(y) 35645 b.AddControl(v0) 35646 return true 35647 } 35648 // match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no) 35649 // cond: x.Uses == 1 35650 // result: (NE (CMNconst [c] y) yes no) 35651 for b.Controls[0].Op == OpARM64CMPconst { 35652 v_0 := b.Controls[0] 35653 if v_0.AuxInt != 0 { 35654 break 35655 } 35656 x := v_0.Args[0] 35657 if x.Op != OpARM64ADDconst { 35658 break 35659 } 35660 c := x.AuxInt 35661 y := x.Args[0] 35662 if !(x.Uses == 1) { 35663 break 35664 } 35665 b.Reset(BlockARM64NE) 35666 v0 := b.NewValue0(v_0.Pos, OpARM64CMNconst, types.TypeFlags) 35667 v0.AuxInt = c 
35668 v0.AddArg(y) 35669 b.AddControl(v0) 35670 return true 35671 } 35672 // match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 35673 // cond: x.Uses == 1 35674 // result: (NE (CMNWconst [c] y) yes no) 35675 for b.Controls[0].Op == OpARM64CMPWconst { 35676 v_0 := b.Controls[0] 35677 if v_0.AuxInt != 0 { 35678 break 35679 } 35680 x := v_0.Args[0] 35681 if x.Op != OpARM64ADDconst { 35682 break 35683 } 35684 c := x.AuxInt 35685 y := x.Args[0] 35686 if !(x.Uses == 1) { 35687 break 35688 } 35689 b.Reset(BlockARM64NE) 35690 v0 := b.NewValue0(v_0.Pos, OpARM64CMNWconst, types.TypeFlags) 35691 v0.AuxInt = c 35692 v0.AddArg(y) 35693 b.AddControl(v0) 35694 return true 35695 } 35696 // match: (NE (CMPconst [0] z:(ADD x y)) yes no) 35697 // cond: z.Uses == 1 35698 // result: (NE (CMN x y) yes no) 35699 for b.Controls[0].Op == OpARM64CMPconst { 35700 v_0 := b.Controls[0] 35701 if v_0.AuxInt != 0 { 35702 break 35703 } 35704 z := v_0.Args[0] 35705 if z.Op != OpARM64ADD { 35706 break 35707 } 35708 y := z.Args[1] 35709 x := z.Args[0] 35710 if !(z.Uses == 1) { 35711 break 35712 } 35713 b.Reset(BlockARM64NE) 35714 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35715 v0.AddArg(x) 35716 v0.AddArg(y) 35717 b.AddControl(v0) 35718 return true 35719 } 35720 // match: (NE (CMPWconst [0] z:(ADD x y)) yes no) 35721 // cond: z.Uses == 1 35722 // result: (NE (CMNW x y) yes no) 35723 for b.Controls[0].Op == OpARM64CMPWconst { 35724 v_0 := b.Controls[0] 35725 if v_0.AuxInt != 0 { 35726 break 35727 } 35728 z := v_0.Args[0] 35729 if z.Op != OpARM64ADD { 35730 break 35731 } 35732 y := z.Args[1] 35733 x := z.Args[0] 35734 if !(z.Uses == 1) { 35735 break 35736 } 35737 b.Reset(BlockARM64NE) 35738 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35739 v0.AddArg(x) 35740 v0.AddArg(y) 35741 b.AddControl(v0) 35742 return true 35743 } 35744 // match: (NE (CMP x z:(NEG y)) yes no) 35745 // cond: z.Uses == 1 35746 // result: (NE (CMN x y) yes no) 35747 for b.Controls[0].Op == OpARM64CMP { 
35748 v_0 := b.Controls[0] 35749 _ = v_0.Args[1] 35750 x := v_0.Args[0] 35751 z := v_0.Args[1] 35752 if z.Op != OpARM64NEG { 35753 break 35754 } 35755 y := z.Args[0] 35756 if !(z.Uses == 1) { 35757 break 35758 } 35759 b.Reset(BlockARM64NE) 35760 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35761 v0.AddArg(x) 35762 v0.AddArg(y) 35763 b.AddControl(v0) 35764 return true 35765 } 35766 // match: (NE (CMPW x z:(NEG y)) yes no) 35767 // cond: z.Uses == 1 35768 // result: (NE (CMNW x y) yes no) 35769 for b.Controls[0].Op == OpARM64CMPW { 35770 v_0 := b.Controls[0] 35771 _ = v_0.Args[1] 35772 x := v_0.Args[0] 35773 z := v_0.Args[1] 35774 if z.Op != OpARM64NEG { 35775 break 35776 } 35777 y := z.Args[0] 35778 if !(z.Uses == 1) { 35779 break 35780 } 35781 b.Reset(BlockARM64NE) 35782 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35783 v0.AddArg(x) 35784 v0.AddArg(y) 35785 b.AddControl(v0) 35786 return true 35787 } 35788 // match: (NE (CMPconst [0] x) yes no) 35789 // result: (NZ x yes no) 35790 for b.Controls[0].Op == OpARM64CMPconst { 35791 v_0 := b.Controls[0] 35792 if v_0.AuxInt != 0 { 35793 break 35794 } 35795 x := v_0.Args[0] 35796 b.Reset(BlockARM64NZ) 35797 b.AddControl(x) 35798 return true 35799 } 35800 // match: (NE (CMPWconst [0] x) yes no) 35801 // result: (NZW x yes no) 35802 for b.Controls[0].Op == OpARM64CMPWconst { 35803 v_0 := b.Controls[0] 35804 if v_0.AuxInt != 0 { 35805 break 35806 } 35807 x := v_0.Args[0] 35808 b.Reset(BlockARM64NZW) 35809 b.AddControl(x) 35810 return true 35811 } 35812 // match: (NE (CMPconst [0] z:(MADD a x y)) yes no) 35813 // cond: z.Uses==1 35814 // result: (NE (CMN a (MUL <x.Type> x y)) yes no) 35815 for b.Controls[0].Op == OpARM64CMPconst { 35816 v_0 := b.Controls[0] 35817 if v_0.AuxInt != 0 { 35818 break 35819 } 35820 z := v_0.Args[0] 35821 if z.Op != OpARM64MADD { 35822 break 35823 } 35824 y := z.Args[2] 35825 a := z.Args[0] 35826 x := z.Args[1] 35827 if !(z.Uses == 1) { 35828 break 35829 } 35830 
b.Reset(BlockARM64NE) 35831 v0 := b.NewValue0(v_0.Pos, OpARM64CMN, types.TypeFlags) 35832 v0.AddArg(a) 35833 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35834 v1.AddArg(x) 35835 v1.AddArg(y) 35836 v0.AddArg(v1) 35837 b.AddControl(v0) 35838 return true 35839 } 35840 // match: (NE (CMPconst [0] z:(MSUB a x y)) yes no) 35841 // cond: z.Uses==1 35842 // result: (NE (CMP a (MUL <x.Type> x y)) yes no) 35843 for b.Controls[0].Op == OpARM64CMPconst { 35844 v_0 := b.Controls[0] 35845 if v_0.AuxInt != 0 { 35846 break 35847 } 35848 z := v_0.Args[0] 35849 if z.Op != OpARM64MSUB { 35850 break 35851 } 35852 y := z.Args[2] 35853 a := z.Args[0] 35854 x := z.Args[1] 35855 if !(z.Uses == 1) { 35856 break 35857 } 35858 b.Reset(BlockARM64NE) 35859 v0 := b.NewValue0(v_0.Pos, OpARM64CMP, types.TypeFlags) 35860 v0.AddArg(a) 35861 v1 := b.NewValue0(v_0.Pos, OpARM64MUL, x.Type) 35862 v1.AddArg(x) 35863 v1.AddArg(y) 35864 v0.AddArg(v1) 35865 b.AddControl(v0) 35866 return true 35867 } 35868 // match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no) 35869 // cond: z.Uses==1 35870 // result: (NE (CMNW a (MULW <x.Type> x y)) yes no) 35871 for b.Controls[0].Op == OpARM64CMPWconst { 35872 v_0 := b.Controls[0] 35873 if v_0.AuxInt != 0 { 35874 break 35875 } 35876 z := v_0.Args[0] 35877 if z.Op != OpARM64MADDW { 35878 break 35879 } 35880 y := z.Args[2] 35881 a := z.Args[0] 35882 x := z.Args[1] 35883 if !(z.Uses == 1) { 35884 break 35885 } 35886 b.Reset(BlockARM64NE) 35887 v0 := b.NewValue0(v_0.Pos, OpARM64CMNW, types.TypeFlags) 35888 v0.AddArg(a) 35889 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35890 v1.AddArg(x) 35891 v1.AddArg(y) 35892 v0.AddArg(v1) 35893 b.AddControl(v0) 35894 return true 35895 } 35896 // match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no) 35897 // cond: z.Uses==1 35898 // result: (NE (CMPW a (MULW <x.Type> x y)) yes no) 35899 for b.Controls[0].Op == OpARM64CMPWconst { 35900 v_0 := b.Controls[0] 35901 if v_0.AuxInt != 0 { 35902 break 35903 } 35904 z := v_0.Args[0] 35905 if z.Op 
!= OpARM64MSUBW { 35906 break 35907 } 35908 y := z.Args[2] 35909 a := z.Args[0] 35910 x := z.Args[1] 35911 if !(z.Uses == 1) { 35912 break 35913 } 35914 b.Reset(BlockARM64NE) 35915 v0 := b.NewValue0(v_0.Pos, OpARM64CMPW, types.TypeFlags) 35916 v0.AddArg(a) 35917 v1 := b.NewValue0(v_0.Pos, OpARM64MULW, x.Type) 35918 v1.AddArg(x) 35919 v1.AddArg(y) 35920 v0.AddArg(v1) 35921 b.AddControl(v0) 35922 return true 35923 } 35924 // match: (NE (TSTconst [c] x) yes no) 35925 // cond: oneBit(c) 35926 // result: (TBNZ {ntz(c)} x yes no) 35927 for b.Controls[0].Op == OpARM64TSTconst { 35928 v_0 := b.Controls[0] 35929 c := v_0.AuxInt 35930 x := v_0.Args[0] 35931 if !(oneBit(c)) { 35932 break 35933 } 35934 b.Reset(BlockARM64TBNZ) 35935 b.AddControl(x) 35936 b.Aux = ntz(c) 35937 return true 35938 } 35939 // match: (NE (TSTWconst [c] x) yes no) 35940 // cond: oneBit(int64(uint32(c))) 35941 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 35942 for b.Controls[0].Op == OpARM64TSTWconst { 35943 v_0 := b.Controls[0] 35944 c := v_0.AuxInt 35945 x := v_0.Args[0] 35946 if !(oneBit(int64(uint32(c)))) { 35947 break 35948 } 35949 b.Reset(BlockARM64TBNZ) 35950 b.AddControl(x) 35951 b.Aux = ntz(int64(uint32(c))) 35952 return true 35953 } 35954 // match: (NE (FlagEQ) yes no) 35955 // result: (First no yes) 35956 for b.Controls[0].Op == OpARM64FlagEQ { 35957 b.Reset(BlockFirst) 35958 b.swapSuccessors() 35959 return true 35960 } 35961 // match: (NE (FlagLT_ULT) yes no) 35962 // result: (First yes no) 35963 for b.Controls[0].Op == OpARM64FlagLT_ULT { 35964 b.Reset(BlockFirst) 35965 return true 35966 } 35967 // match: (NE (FlagLT_UGT) yes no) 35968 // result: (First yes no) 35969 for b.Controls[0].Op == OpARM64FlagLT_UGT { 35970 b.Reset(BlockFirst) 35971 return true 35972 } 35973 // match: (NE (FlagGT_ULT) yes no) 35974 // result: (First yes no) 35975 for b.Controls[0].Op == OpARM64FlagGT_ULT { 35976 b.Reset(BlockFirst) 35977 return true 35978 } 35979 // match: (NE (FlagGT_UGT) yes no) 35980 // 
result: (First yes no) 35981 for b.Controls[0].Op == OpARM64FlagGT_UGT { 35982 b.Reset(BlockFirst) 35983 return true 35984 } 35985 // match: (NE (InvertFlags cmp) yes no) 35986 // result: (NE cmp yes no) 35987 for b.Controls[0].Op == OpARM64InvertFlags { 35988 v_0 := b.Controls[0] 35989 cmp := v_0.Args[0] 35990 b.Reset(BlockARM64NE) 35991 b.AddControl(cmp) 35992 return true 35993 } 35994 case BlockARM64NZ: 35995 // match: (NZ (Equal cc) yes no) 35996 // result: (EQ cc yes no) 35997 for b.Controls[0].Op == OpARM64Equal { 35998 v_0 := b.Controls[0] 35999 cc := v_0.Args[0] 36000 b.Reset(BlockARM64EQ) 36001 b.AddControl(cc) 36002 return true 36003 } 36004 // match: (NZ (NotEqual cc) yes no) 36005 // result: (NE cc yes no) 36006 for b.Controls[0].Op == OpARM64NotEqual { 36007 v_0 := b.Controls[0] 36008 cc := v_0.Args[0] 36009 b.Reset(BlockARM64NE) 36010 b.AddControl(cc) 36011 return true 36012 } 36013 // match: (NZ (LessThan cc) yes no) 36014 // result: (LT cc yes no) 36015 for b.Controls[0].Op == OpARM64LessThan { 36016 v_0 := b.Controls[0] 36017 cc := v_0.Args[0] 36018 b.Reset(BlockARM64LT) 36019 b.AddControl(cc) 36020 return true 36021 } 36022 // match: (NZ (LessThanU cc) yes no) 36023 // result: (ULT cc yes no) 36024 for b.Controls[0].Op == OpARM64LessThanU { 36025 v_0 := b.Controls[0] 36026 cc := v_0.Args[0] 36027 b.Reset(BlockARM64ULT) 36028 b.AddControl(cc) 36029 return true 36030 } 36031 // match: (NZ (LessEqual cc) yes no) 36032 // result: (LE cc yes no) 36033 for b.Controls[0].Op == OpARM64LessEqual { 36034 v_0 := b.Controls[0] 36035 cc := v_0.Args[0] 36036 b.Reset(BlockARM64LE) 36037 b.AddControl(cc) 36038 return true 36039 } 36040 // match: (NZ (LessEqualU cc) yes no) 36041 // result: (ULE cc yes no) 36042 for b.Controls[0].Op == OpARM64LessEqualU { 36043 v_0 := b.Controls[0] 36044 cc := v_0.Args[0] 36045 b.Reset(BlockARM64ULE) 36046 b.AddControl(cc) 36047 return true 36048 } 36049 // match: (NZ (GreaterThan cc) yes no) 36050 // result: (GT cc yes no) 36051 
for b.Controls[0].Op == OpARM64GreaterThan { 36052 v_0 := b.Controls[0] 36053 cc := v_0.Args[0] 36054 b.Reset(BlockARM64GT) 36055 b.AddControl(cc) 36056 return true 36057 } 36058 // match: (NZ (GreaterThanU cc) yes no) 36059 // result: (UGT cc yes no) 36060 for b.Controls[0].Op == OpARM64GreaterThanU { 36061 v_0 := b.Controls[0] 36062 cc := v_0.Args[0] 36063 b.Reset(BlockARM64UGT) 36064 b.AddControl(cc) 36065 return true 36066 } 36067 // match: (NZ (GreaterEqual cc) yes no) 36068 // result: (GE cc yes no) 36069 for b.Controls[0].Op == OpARM64GreaterEqual { 36070 v_0 := b.Controls[0] 36071 cc := v_0.Args[0] 36072 b.Reset(BlockARM64GE) 36073 b.AddControl(cc) 36074 return true 36075 } 36076 // match: (NZ (GreaterEqualU cc) yes no) 36077 // result: (UGE cc yes no) 36078 for b.Controls[0].Op == OpARM64GreaterEqualU { 36079 v_0 := b.Controls[0] 36080 cc := v_0.Args[0] 36081 b.Reset(BlockARM64UGE) 36082 b.AddControl(cc) 36083 return true 36084 } 36085 // match: (NZ (LessThanF cc) yes no) 36086 // result: (FLT cc yes no) 36087 for b.Controls[0].Op == OpARM64LessThanF { 36088 v_0 := b.Controls[0] 36089 cc := v_0.Args[0] 36090 b.Reset(BlockARM64FLT) 36091 b.AddControl(cc) 36092 return true 36093 } 36094 // match: (NZ (LessEqualF cc) yes no) 36095 // result: (FLE cc yes no) 36096 for b.Controls[0].Op == OpARM64LessEqualF { 36097 v_0 := b.Controls[0] 36098 cc := v_0.Args[0] 36099 b.Reset(BlockARM64FLE) 36100 b.AddControl(cc) 36101 return true 36102 } 36103 // match: (NZ (GreaterThanF cc) yes no) 36104 // result: (FGT cc yes no) 36105 for b.Controls[0].Op == OpARM64GreaterThanF { 36106 v_0 := b.Controls[0] 36107 cc := v_0.Args[0] 36108 b.Reset(BlockARM64FGT) 36109 b.AddControl(cc) 36110 return true 36111 } 36112 // match: (NZ (GreaterEqualF cc) yes no) 36113 // result: (FGE cc yes no) 36114 for b.Controls[0].Op == OpARM64GreaterEqualF { 36115 v_0 := b.Controls[0] 36116 cc := v_0.Args[0] 36117 b.Reset(BlockARM64FGE) 36118 b.AddControl(cc) 36119 return true 36120 } 36121 // 
match: (NZ (ANDconst [c] x) yes no) 36122 // cond: oneBit(c) 36123 // result: (TBNZ {ntz(c)} x yes no) 36124 for b.Controls[0].Op == OpARM64ANDconst { 36125 v_0 := b.Controls[0] 36126 c := v_0.AuxInt 36127 x := v_0.Args[0] 36128 if !(oneBit(c)) { 36129 break 36130 } 36131 b.Reset(BlockARM64TBNZ) 36132 b.AddControl(x) 36133 b.Aux = ntz(c) 36134 return true 36135 } 36136 // match: (NZ (MOVDconst [0]) yes no) 36137 // result: (First no yes) 36138 for b.Controls[0].Op == OpARM64MOVDconst { 36139 v_0 := b.Controls[0] 36140 if v_0.AuxInt != 0 { 36141 break 36142 } 36143 b.Reset(BlockFirst) 36144 b.swapSuccessors() 36145 return true 36146 } 36147 // match: (NZ (MOVDconst [c]) yes no) 36148 // cond: c != 0 36149 // result: (First yes no) 36150 for b.Controls[0].Op == OpARM64MOVDconst { 36151 v_0 := b.Controls[0] 36152 c := v_0.AuxInt 36153 if !(c != 0) { 36154 break 36155 } 36156 b.Reset(BlockFirst) 36157 return true 36158 } 36159 case BlockARM64NZW: 36160 // match: (NZW (ANDconst [c] x) yes no) 36161 // cond: oneBit(int64(uint32(c))) 36162 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 36163 for b.Controls[0].Op == OpARM64ANDconst { 36164 v_0 := b.Controls[0] 36165 c := v_0.AuxInt 36166 x := v_0.Args[0] 36167 if !(oneBit(int64(uint32(c)))) { 36168 break 36169 } 36170 b.Reset(BlockARM64TBNZ) 36171 b.AddControl(x) 36172 b.Aux = ntz(int64(uint32(c))) 36173 return true 36174 } 36175 // match: (NZW (MOVDconst [c]) yes no) 36176 // cond: int32(c) == 0 36177 // result: (First no yes) 36178 for b.Controls[0].Op == OpARM64MOVDconst { 36179 v_0 := b.Controls[0] 36180 c := v_0.AuxInt 36181 if !(int32(c) == 0) { 36182 break 36183 } 36184 b.Reset(BlockFirst) 36185 b.swapSuccessors() 36186 return true 36187 } 36188 // match: (NZW (MOVDconst [c]) yes no) 36189 // cond: int32(c) != 0 36190 // result: (First yes no) 36191 for b.Controls[0].Op == OpARM64MOVDconst { 36192 v_0 := b.Controls[0] 36193 c := v_0.AuxInt 36194 if !(int32(c) != 0) { 36195 break 36196 } 36197 b.Reset(BlockFirst) 
36198 return true 36199 } 36200 case BlockARM64UGE: 36201 // match: (UGE (FlagEQ) yes no) 36202 // result: (First yes no) 36203 for b.Controls[0].Op == OpARM64FlagEQ { 36204 b.Reset(BlockFirst) 36205 return true 36206 } 36207 // match: (UGE (FlagLT_ULT) yes no) 36208 // result: (First no yes) 36209 for b.Controls[0].Op == OpARM64FlagLT_ULT { 36210 b.Reset(BlockFirst) 36211 b.swapSuccessors() 36212 return true 36213 } 36214 // match: (UGE (FlagLT_UGT) yes no) 36215 // result: (First yes no) 36216 for b.Controls[0].Op == OpARM64FlagLT_UGT { 36217 b.Reset(BlockFirst) 36218 return true 36219 } 36220 // match: (UGE (FlagGT_ULT) yes no) 36221 // result: (First no yes) 36222 for b.Controls[0].Op == OpARM64FlagGT_ULT { 36223 b.Reset(BlockFirst) 36224 b.swapSuccessors() 36225 return true 36226 } 36227 // match: (UGE (FlagGT_UGT) yes no) 36228 // result: (First yes no) 36229 for b.Controls[0].Op == OpARM64FlagGT_UGT { 36230 b.Reset(BlockFirst) 36231 return true 36232 } 36233 // match: (UGE (InvertFlags cmp) yes no) 36234 // result: (ULE cmp yes no) 36235 for b.Controls[0].Op == OpARM64InvertFlags { 36236 v_0 := b.Controls[0] 36237 cmp := v_0.Args[0] 36238 b.Reset(BlockARM64ULE) 36239 b.AddControl(cmp) 36240 return true 36241 } 36242 case BlockARM64UGT: 36243 // match: (UGT (FlagEQ) yes no) 36244 // result: (First no yes) 36245 for b.Controls[0].Op == OpARM64FlagEQ { 36246 b.Reset(BlockFirst) 36247 b.swapSuccessors() 36248 return true 36249 } 36250 // match: (UGT (FlagLT_ULT) yes no) 36251 // result: (First no yes) 36252 for b.Controls[0].Op == OpARM64FlagLT_ULT { 36253 b.Reset(BlockFirst) 36254 b.swapSuccessors() 36255 return true 36256 } 36257 // match: (UGT (FlagLT_UGT) yes no) 36258 // result: (First yes no) 36259 for b.Controls[0].Op == OpARM64FlagLT_UGT { 36260 b.Reset(BlockFirst) 36261 return true 36262 } 36263 // match: (UGT (FlagGT_ULT) yes no) 36264 // result: (First no yes) 36265 for b.Controls[0].Op == OpARM64FlagGT_ULT { 36266 b.Reset(BlockFirst) 36267 
b.swapSuccessors() 36268 return true 36269 } 36270 // match: (UGT (FlagGT_UGT) yes no) 36271 // result: (First yes no) 36272 for b.Controls[0].Op == OpARM64FlagGT_UGT { 36273 b.Reset(BlockFirst) 36274 return true 36275 } 36276 // match: (UGT (InvertFlags cmp) yes no) 36277 // result: (ULT cmp yes no) 36278 for b.Controls[0].Op == OpARM64InvertFlags { 36279 v_0 := b.Controls[0] 36280 cmp := v_0.Args[0] 36281 b.Reset(BlockARM64ULT) 36282 b.AddControl(cmp) 36283 return true 36284 } 36285 case BlockARM64ULE: 36286 // match: (ULE (FlagEQ) yes no) 36287 // result: (First yes no) 36288 for b.Controls[0].Op == OpARM64FlagEQ { 36289 b.Reset(BlockFirst) 36290 return true 36291 } 36292 // match: (ULE (FlagLT_ULT) yes no) 36293 // result: (First yes no) 36294 for b.Controls[0].Op == OpARM64FlagLT_ULT { 36295 b.Reset(BlockFirst) 36296 return true 36297 } 36298 // match: (ULE (FlagLT_UGT) yes no) 36299 // result: (First no yes) 36300 for b.Controls[0].Op == OpARM64FlagLT_UGT { 36301 b.Reset(BlockFirst) 36302 b.swapSuccessors() 36303 return true 36304 } 36305 // match: (ULE (FlagGT_ULT) yes no) 36306 // result: (First yes no) 36307 for b.Controls[0].Op == OpARM64FlagGT_ULT { 36308 b.Reset(BlockFirst) 36309 return true 36310 } 36311 // match: (ULE (FlagGT_UGT) yes no) 36312 // result: (First no yes) 36313 for b.Controls[0].Op == OpARM64FlagGT_UGT { 36314 b.Reset(BlockFirst) 36315 b.swapSuccessors() 36316 return true 36317 } 36318 // match: (ULE (InvertFlags cmp) yes no) 36319 // result: (UGE cmp yes no) 36320 for b.Controls[0].Op == OpARM64InvertFlags { 36321 v_0 := b.Controls[0] 36322 cmp := v_0.Args[0] 36323 b.Reset(BlockARM64UGE) 36324 b.AddControl(cmp) 36325 return true 36326 } 36327 case BlockARM64ULT: 36328 // match: (ULT (FlagEQ) yes no) 36329 // result: (First no yes) 36330 for b.Controls[0].Op == OpARM64FlagEQ { 36331 b.Reset(BlockFirst) 36332 b.swapSuccessors() 36333 return true 36334 } 36335 // match: (ULT (FlagLT_ULT) yes no) 36336 // result: (First yes no) 36337 for 
b.Controls[0].Op == OpARM64FlagLT_ULT { 36338 b.Reset(BlockFirst) 36339 return true 36340 } 36341 // match: (ULT (FlagLT_UGT) yes no) 36342 // result: (First no yes) 36343 for b.Controls[0].Op == OpARM64FlagLT_UGT { 36344 b.Reset(BlockFirst) 36345 b.swapSuccessors() 36346 return true 36347 } 36348 // match: (ULT (FlagGT_ULT) yes no) 36349 // result: (First yes no) 36350 for b.Controls[0].Op == OpARM64FlagGT_ULT { 36351 b.Reset(BlockFirst) 36352 return true 36353 } 36354 // match: (ULT (FlagGT_UGT) yes no) 36355 // result: (First no yes) 36356 for b.Controls[0].Op == OpARM64FlagGT_UGT { 36357 b.Reset(BlockFirst) 36358 b.swapSuccessors() 36359 return true 36360 } 36361 // match: (ULT (InvertFlags cmp) yes no) 36362 // result: (UGT cmp yes no) 36363 for b.Controls[0].Op == OpARM64InvertFlags { 36364 v_0 := b.Controls[0] 36365 cmp := v_0.Args[0] 36366 b.Reset(BlockARM64UGT) 36367 b.AddControl(cmp) 36368 return true 36369 } 36370 case BlockARM64Z: 36371 // match: (Z (ANDconst [c] x) yes no) 36372 // cond: oneBit(c) 36373 // result: (TBZ {ntz(c)} x yes no) 36374 for b.Controls[0].Op == OpARM64ANDconst { 36375 v_0 := b.Controls[0] 36376 c := v_0.AuxInt 36377 x := v_0.Args[0] 36378 if !(oneBit(c)) { 36379 break 36380 } 36381 b.Reset(BlockARM64TBZ) 36382 b.AddControl(x) 36383 b.Aux = ntz(c) 36384 return true 36385 } 36386 // match: (Z (MOVDconst [0]) yes no) 36387 // result: (First yes no) 36388 for b.Controls[0].Op == OpARM64MOVDconst { 36389 v_0 := b.Controls[0] 36390 if v_0.AuxInt != 0 { 36391 break 36392 } 36393 b.Reset(BlockFirst) 36394 return true 36395 } 36396 // match: (Z (MOVDconst [c]) yes no) 36397 // cond: c != 0 36398 // result: (First no yes) 36399 for b.Controls[0].Op == OpARM64MOVDconst { 36400 v_0 := b.Controls[0] 36401 c := v_0.AuxInt 36402 if !(c != 0) { 36403 break 36404 } 36405 b.Reset(BlockFirst) 36406 b.swapSuccessors() 36407 return true 36408 } 36409 case BlockARM64ZW: 36410 // match: (ZW (ANDconst [c] x) yes no) 36411 // cond: 
oneBit(int64(uint32(c))) 36412 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 36413 for b.Controls[0].Op == OpARM64ANDconst { 36414 v_0 := b.Controls[0] 36415 c := v_0.AuxInt 36416 x := v_0.Args[0] 36417 if !(oneBit(int64(uint32(c)))) { 36418 break 36419 } 36420 b.Reset(BlockARM64TBZ) 36421 b.AddControl(x) 36422 b.Aux = ntz(int64(uint32(c))) 36423 return true 36424 } 36425 // match: (ZW (MOVDconst [c]) yes no) 36426 // cond: int32(c) == 0 36427 // result: (First yes no) 36428 for b.Controls[0].Op == OpARM64MOVDconst { 36429 v_0 := b.Controls[0] 36430 c := v_0.AuxInt 36431 if !(int32(c) == 0) { 36432 break 36433 } 36434 b.Reset(BlockFirst) 36435 return true 36436 } 36437 // match: (ZW (MOVDconst [c]) yes no) 36438 // cond: int32(c) != 0 36439 // result: (First no yes) 36440 for b.Controls[0].Op == OpARM64MOVDconst { 36441 v_0 := b.Controls[0] 36442 c := v_0.AuxInt 36443 if !(int32(c) != 0) { 36444 break 36445 } 36446 b.Reset(BlockFirst) 36447 b.swapSuccessors() 36448 return true 36449 } 36450 } 36451 return false 36452 }