github.com/megatontech/mynoteforgo@v0.0.0-20200507084910-5d0c6ea6e890/源码/cmd/compile/internal/ssa/rewriteARM64.go (about) 1 // Code generated from gen/ARM64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "fmt" 7 import "math" 8 import "cmd/internal/obj" 9 import "cmd/internal/objabi" 10 import "cmd/compile/internal/types" 11 12 var _ = fmt.Println // in case not otherwise used 13 var _ = math.MinInt8 // in case not otherwise used 14 var _ = obj.ANOP // in case not otherwise used 15 var _ = objabi.GOROOT // in case not otherwise used 16 var _ = types.TypeMem // in case not otherwise used 17 18 func rewriteValueARM64(v *Value) bool { 19 switch v.Op { 20 case OpARM64ADD: 21 return rewriteValueARM64_OpARM64ADD_0(v) || rewriteValueARM64_OpARM64ADD_10(v) || rewriteValueARM64_OpARM64ADD_20(v) 22 case OpARM64ADDconst: 23 return rewriteValueARM64_OpARM64ADDconst_0(v) 24 case OpARM64ADDshiftLL: 25 return rewriteValueARM64_OpARM64ADDshiftLL_0(v) 26 case OpARM64ADDshiftRA: 27 return rewriteValueARM64_OpARM64ADDshiftRA_0(v) 28 case OpARM64ADDshiftRL: 29 return rewriteValueARM64_OpARM64ADDshiftRL_0(v) 30 case OpARM64AND: 31 return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v) 32 case OpARM64ANDconst: 33 return rewriteValueARM64_OpARM64ANDconst_0(v) 34 case OpARM64ANDshiftLL: 35 return rewriteValueARM64_OpARM64ANDshiftLL_0(v) 36 case OpARM64ANDshiftRA: 37 return rewriteValueARM64_OpARM64ANDshiftRA_0(v) 38 case OpARM64ANDshiftRL: 39 return rewriteValueARM64_OpARM64ANDshiftRL_0(v) 40 case OpARM64BIC: 41 return rewriteValueARM64_OpARM64BIC_0(v) 42 case OpARM64BICshiftLL: 43 return rewriteValueARM64_OpARM64BICshiftLL_0(v) 44 case OpARM64BICshiftRA: 45 return rewriteValueARM64_OpARM64BICshiftRA_0(v) 46 case OpARM64BICshiftRL: 47 return rewriteValueARM64_OpARM64BICshiftRL_0(v) 48 case OpARM64CMN: 49 return rewriteValueARM64_OpARM64CMN_0(v) 50 case OpARM64CMNW: 51 return rewriteValueARM64_OpARM64CMNW_0(v) 52 case 
OpARM64CMNWconst: 53 return rewriteValueARM64_OpARM64CMNWconst_0(v) 54 case OpARM64CMNconst: 55 return rewriteValueARM64_OpARM64CMNconst_0(v) 56 case OpARM64CMNshiftLL: 57 return rewriteValueARM64_OpARM64CMNshiftLL_0(v) 58 case OpARM64CMNshiftRA: 59 return rewriteValueARM64_OpARM64CMNshiftRA_0(v) 60 case OpARM64CMNshiftRL: 61 return rewriteValueARM64_OpARM64CMNshiftRL_0(v) 62 case OpARM64CMP: 63 return rewriteValueARM64_OpARM64CMP_0(v) 64 case OpARM64CMPW: 65 return rewriteValueARM64_OpARM64CMPW_0(v) 66 case OpARM64CMPWconst: 67 return rewriteValueARM64_OpARM64CMPWconst_0(v) 68 case OpARM64CMPconst: 69 return rewriteValueARM64_OpARM64CMPconst_0(v) 70 case OpARM64CMPshiftLL: 71 return rewriteValueARM64_OpARM64CMPshiftLL_0(v) 72 case OpARM64CMPshiftRA: 73 return rewriteValueARM64_OpARM64CMPshiftRA_0(v) 74 case OpARM64CMPshiftRL: 75 return rewriteValueARM64_OpARM64CMPshiftRL_0(v) 76 case OpARM64CSEL: 77 return rewriteValueARM64_OpARM64CSEL_0(v) 78 case OpARM64CSEL0: 79 return rewriteValueARM64_OpARM64CSEL0_0(v) 80 case OpARM64DIV: 81 return rewriteValueARM64_OpARM64DIV_0(v) 82 case OpARM64DIVW: 83 return rewriteValueARM64_OpARM64DIVW_0(v) 84 case OpARM64EON: 85 return rewriteValueARM64_OpARM64EON_0(v) 86 case OpARM64EONshiftLL: 87 return rewriteValueARM64_OpARM64EONshiftLL_0(v) 88 case OpARM64EONshiftRA: 89 return rewriteValueARM64_OpARM64EONshiftRA_0(v) 90 case OpARM64EONshiftRL: 91 return rewriteValueARM64_OpARM64EONshiftRL_0(v) 92 case OpARM64Equal: 93 return rewriteValueARM64_OpARM64Equal_0(v) 94 case OpARM64FADDD: 95 return rewriteValueARM64_OpARM64FADDD_0(v) 96 case OpARM64FADDS: 97 return rewriteValueARM64_OpARM64FADDS_0(v) 98 case OpARM64FMOVDfpgp: 99 return rewriteValueARM64_OpARM64FMOVDfpgp_0(v) 100 case OpARM64FMOVDgpfp: 101 return rewriteValueARM64_OpARM64FMOVDgpfp_0(v) 102 case OpARM64FMOVDload: 103 return rewriteValueARM64_OpARM64FMOVDload_0(v) 104 case OpARM64FMOVDloadidx: 105 return rewriteValueARM64_OpARM64FMOVDloadidx_0(v) 106 case OpARM64FMOVDstore: 
107 return rewriteValueARM64_OpARM64FMOVDstore_0(v) 108 case OpARM64FMOVDstoreidx: 109 return rewriteValueARM64_OpARM64FMOVDstoreidx_0(v) 110 case OpARM64FMOVSload: 111 return rewriteValueARM64_OpARM64FMOVSload_0(v) 112 case OpARM64FMOVSloadidx: 113 return rewriteValueARM64_OpARM64FMOVSloadidx_0(v) 114 case OpARM64FMOVSstore: 115 return rewriteValueARM64_OpARM64FMOVSstore_0(v) 116 case OpARM64FMOVSstoreidx: 117 return rewriteValueARM64_OpARM64FMOVSstoreidx_0(v) 118 case OpARM64FMULD: 119 return rewriteValueARM64_OpARM64FMULD_0(v) 120 case OpARM64FMULS: 121 return rewriteValueARM64_OpARM64FMULS_0(v) 122 case OpARM64FNEGD: 123 return rewriteValueARM64_OpARM64FNEGD_0(v) 124 case OpARM64FNEGS: 125 return rewriteValueARM64_OpARM64FNEGS_0(v) 126 case OpARM64FNMULD: 127 return rewriteValueARM64_OpARM64FNMULD_0(v) 128 case OpARM64FNMULS: 129 return rewriteValueARM64_OpARM64FNMULS_0(v) 130 case OpARM64FSUBD: 131 return rewriteValueARM64_OpARM64FSUBD_0(v) 132 case OpARM64FSUBS: 133 return rewriteValueARM64_OpARM64FSUBS_0(v) 134 case OpARM64GreaterEqual: 135 return rewriteValueARM64_OpARM64GreaterEqual_0(v) 136 case OpARM64GreaterEqualU: 137 return rewriteValueARM64_OpARM64GreaterEqualU_0(v) 138 case OpARM64GreaterThan: 139 return rewriteValueARM64_OpARM64GreaterThan_0(v) 140 case OpARM64GreaterThanU: 141 return rewriteValueARM64_OpARM64GreaterThanU_0(v) 142 case OpARM64LessEqual: 143 return rewriteValueARM64_OpARM64LessEqual_0(v) 144 case OpARM64LessEqualU: 145 return rewriteValueARM64_OpARM64LessEqualU_0(v) 146 case OpARM64LessThan: 147 return rewriteValueARM64_OpARM64LessThan_0(v) 148 case OpARM64LessThanU: 149 return rewriteValueARM64_OpARM64LessThanU_0(v) 150 case OpARM64MADD: 151 return rewriteValueARM64_OpARM64MADD_0(v) || rewriteValueARM64_OpARM64MADD_10(v) || rewriteValueARM64_OpARM64MADD_20(v) 152 case OpARM64MADDW: 153 return rewriteValueARM64_OpARM64MADDW_0(v) || rewriteValueARM64_OpARM64MADDW_10(v) || rewriteValueARM64_OpARM64MADDW_20(v) 154 case OpARM64MNEG: 155 
return rewriteValueARM64_OpARM64MNEG_0(v) || rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v) 156 case OpARM64MNEGW: 157 return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v) 158 case OpARM64MOD: 159 return rewriteValueARM64_OpARM64MOD_0(v) 160 case OpARM64MODW: 161 return rewriteValueARM64_OpARM64MODW_0(v) 162 case OpARM64MOVBUload: 163 return rewriteValueARM64_OpARM64MOVBUload_0(v) 164 case OpARM64MOVBUloadidx: 165 return rewriteValueARM64_OpARM64MOVBUloadidx_0(v) 166 case OpARM64MOVBUreg: 167 return rewriteValueARM64_OpARM64MOVBUreg_0(v) 168 case OpARM64MOVBload: 169 return rewriteValueARM64_OpARM64MOVBload_0(v) 170 case OpARM64MOVBloadidx: 171 return rewriteValueARM64_OpARM64MOVBloadidx_0(v) 172 case OpARM64MOVBreg: 173 return rewriteValueARM64_OpARM64MOVBreg_0(v) 174 case OpARM64MOVBstore: 175 return rewriteValueARM64_OpARM64MOVBstore_0(v) || rewriteValueARM64_OpARM64MOVBstore_10(v) || rewriteValueARM64_OpARM64MOVBstore_20(v) || rewriteValueARM64_OpARM64MOVBstore_30(v) || rewriteValueARM64_OpARM64MOVBstore_40(v) 176 case OpARM64MOVBstoreidx: 177 return rewriteValueARM64_OpARM64MOVBstoreidx_0(v) || rewriteValueARM64_OpARM64MOVBstoreidx_10(v) 178 case OpARM64MOVBstorezero: 179 return rewriteValueARM64_OpARM64MOVBstorezero_0(v) 180 case OpARM64MOVBstorezeroidx: 181 return rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v) 182 case OpARM64MOVDload: 183 return rewriteValueARM64_OpARM64MOVDload_0(v) 184 case OpARM64MOVDloadidx: 185 return rewriteValueARM64_OpARM64MOVDloadidx_0(v) 186 case OpARM64MOVDloadidx8: 187 return rewriteValueARM64_OpARM64MOVDloadidx8_0(v) 188 case OpARM64MOVDreg: 189 return rewriteValueARM64_OpARM64MOVDreg_0(v) 190 case OpARM64MOVDstore: 191 return rewriteValueARM64_OpARM64MOVDstore_0(v) 192 case OpARM64MOVDstoreidx: 193 return rewriteValueARM64_OpARM64MOVDstoreidx_0(v) 194 case OpARM64MOVDstoreidx8: 195 return 
rewriteValueARM64_OpARM64MOVDstoreidx8_0(v) 196 case OpARM64MOVDstorezero: 197 return rewriteValueARM64_OpARM64MOVDstorezero_0(v) 198 case OpARM64MOVDstorezeroidx: 199 return rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v) 200 case OpARM64MOVDstorezeroidx8: 201 return rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v) 202 case OpARM64MOVHUload: 203 return rewriteValueARM64_OpARM64MOVHUload_0(v) 204 case OpARM64MOVHUloadidx: 205 return rewriteValueARM64_OpARM64MOVHUloadidx_0(v) 206 case OpARM64MOVHUloadidx2: 207 return rewriteValueARM64_OpARM64MOVHUloadidx2_0(v) 208 case OpARM64MOVHUreg: 209 return rewriteValueARM64_OpARM64MOVHUreg_0(v) || rewriteValueARM64_OpARM64MOVHUreg_10(v) 210 case OpARM64MOVHload: 211 return rewriteValueARM64_OpARM64MOVHload_0(v) 212 case OpARM64MOVHloadidx: 213 return rewriteValueARM64_OpARM64MOVHloadidx_0(v) 214 case OpARM64MOVHloadidx2: 215 return rewriteValueARM64_OpARM64MOVHloadidx2_0(v) 216 case OpARM64MOVHreg: 217 return rewriteValueARM64_OpARM64MOVHreg_0(v) || rewriteValueARM64_OpARM64MOVHreg_10(v) 218 case OpARM64MOVHstore: 219 return rewriteValueARM64_OpARM64MOVHstore_0(v) || rewriteValueARM64_OpARM64MOVHstore_10(v) || rewriteValueARM64_OpARM64MOVHstore_20(v) 220 case OpARM64MOVHstoreidx: 221 return rewriteValueARM64_OpARM64MOVHstoreidx_0(v) || rewriteValueARM64_OpARM64MOVHstoreidx_10(v) 222 case OpARM64MOVHstoreidx2: 223 return rewriteValueARM64_OpARM64MOVHstoreidx2_0(v) 224 case OpARM64MOVHstorezero: 225 return rewriteValueARM64_OpARM64MOVHstorezero_0(v) 226 case OpARM64MOVHstorezeroidx: 227 return rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v) 228 case OpARM64MOVHstorezeroidx2: 229 return rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v) 230 case OpARM64MOVQstorezero: 231 return rewriteValueARM64_OpARM64MOVQstorezero_0(v) 232 case OpARM64MOVWUload: 233 return rewriteValueARM64_OpARM64MOVWUload_0(v) 234 case OpARM64MOVWUloadidx: 235 return rewriteValueARM64_OpARM64MOVWUloadidx_0(v) 236 case OpARM64MOVWUloadidx4: 237 return 
rewriteValueARM64_OpARM64MOVWUloadidx4_0(v) 238 case OpARM64MOVWUreg: 239 return rewriteValueARM64_OpARM64MOVWUreg_0(v) || rewriteValueARM64_OpARM64MOVWUreg_10(v) 240 case OpARM64MOVWload: 241 return rewriteValueARM64_OpARM64MOVWload_0(v) 242 case OpARM64MOVWloadidx: 243 return rewriteValueARM64_OpARM64MOVWloadidx_0(v) 244 case OpARM64MOVWloadidx4: 245 return rewriteValueARM64_OpARM64MOVWloadidx4_0(v) 246 case OpARM64MOVWreg: 247 return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v) 248 case OpARM64MOVWstore: 249 return rewriteValueARM64_OpARM64MOVWstore_0(v) || rewriteValueARM64_OpARM64MOVWstore_10(v) 250 case OpARM64MOVWstoreidx: 251 return rewriteValueARM64_OpARM64MOVWstoreidx_0(v) 252 case OpARM64MOVWstoreidx4: 253 return rewriteValueARM64_OpARM64MOVWstoreidx4_0(v) 254 case OpARM64MOVWstorezero: 255 return rewriteValueARM64_OpARM64MOVWstorezero_0(v) 256 case OpARM64MOVWstorezeroidx: 257 return rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v) 258 case OpARM64MOVWstorezeroidx4: 259 return rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v) 260 case OpARM64MSUB: 261 return rewriteValueARM64_OpARM64MSUB_0(v) || rewriteValueARM64_OpARM64MSUB_10(v) || rewriteValueARM64_OpARM64MSUB_20(v) 262 case OpARM64MSUBW: 263 return rewriteValueARM64_OpARM64MSUBW_0(v) || rewriteValueARM64_OpARM64MSUBW_10(v) || rewriteValueARM64_OpARM64MSUBW_20(v) 264 case OpARM64MUL: 265 return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v) 266 case OpARM64MULW: 267 return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v) 268 case OpARM64MVN: 269 return rewriteValueARM64_OpARM64MVN_0(v) 270 case OpARM64MVNshiftLL: 271 return rewriteValueARM64_OpARM64MVNshiftLL_0(v) 272 case OpARM64MVNshiftRA: 273 return rewriteValueARM64_OpARM64MVNshiftRA_0(v) 274 case OpARM64MVNshiftRL: 275 return rewriteValueARM64_OpARM64MVNshiftRL_0(v) 276 case OpARM64NEG: 277 
return rewriteValueARM64_OpARM64NEG_0(v) 278 case OpARM64NEGshiftLL: 279 return rewriteValueARM64_OpARM64NEGshiftLL_0(v) 280 case OpARM64NEGshiftRA: 281 return rewriteValueARM64_OpARM64NEGshiftRA_0(v) 282 case OpARM64NEGshiftRL: 283 return rewriteValueARM64_OpARM64NEGshiftRL_0(v) 284 case OpARM64NotEqual: 285 return rewriteValueARM64_OpARM64NotEqual_0(v) 286 case OpARM64OR: 287 return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) || rewriteValueARM64_OpARM64OR_20(v) || rewriteValueARM64_OpARM64OR_30(v) || rewriteValueARM64_OpARM64OR_40(v) 288 case OpARM64ORN: 289 return rewriteValueARM64_OpARM64ORN_0(v) 290 case OpARM64ORNshiftLL: 291 return rewriteValueARM64_OpARM64ORNshiftLL_0(v) 292 case OpARM64ORNshiftRA: 293 return rewriteValueARM64_OpARM64ORNshiftRA_0(v) 294 case OpARM64ORNshiftRL: 295 return rewriteValueARM64_OpARM64ORNshiftRL_0(v) 296 case OpARM64ORconst: 297 return rewriteValueARM64_OpARM64ORconst_0(v) 298 case OpARM64ORshiftLL: 299 return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) || rewriteValueARM64_OpARM64ORshiftLL_20(v) 300 case OpARM64ORshiftRA: 301 return rewriteValueARM64_OpARM64ORshiftRA_0(v) 302 case OpARM64ORshiftRL: 303 return rewriteValueARM64_OpARM64ORshiftRL_0(v) 304 case OpARM64RORWconst: 305 return rewriteValueARM64_OpARM64RORWconst_0(v) 306 case OpARM64RORconst: 307 return rewriteValueARM64_OpARM64RORconst_0(v) 308 case OpARM64SLL: 309 return rewriteValueARM64_OpARM64SLL_0(v) 310 case OpARM64SLLconst: 311 return rewriteValueARM64_OpARM64SLLconst_0(v) 312 case OpARM64SRA: 313 return rewriteValueARM64_OpARM64SRA_0(v) 314 case OpARM64SRAconst: 315 return rewriteValueARM64_OpARM64SRAconst_0(v) 316 case OpARM64SRL: 317 return rewriteValueARM64_OpARM64SRL_0(v) 318 case OpARM64SRLconst: 319 return rewriteValueARM64_OpARM64SRLconst_0(v) || rewriteValueARM64_OpARM64SRLconst_10(v) 320 case OpARM64STP: 321 return rewriteValueARM64_OpARM64STP_0(v) 322 case OpARM64SUB: 323 return 
rewriteValueARM64_OpARM64SUB_0(v) || rewriteValueARM64_OpARM64SUB_10(v) 324 case OpARM64SUBconst: 325 return rewriteValueARM64_OpARM64SUBconst_0(v) 326 case OpARM64SUBshiftLL: 327 return rewriteValueARM64_OpARM64SUBshiftLL_0(v) 328 case OpARM64SUBshiftRA: 329 return rewriteValueARM64_OpARM64SUBshiftRA_0(v) 330 case OpARM64SUBshiftRL: 331 return rewriteValueARM64_OpARM64SUBshiftRL_0(v) 332 case OpARM64TST: 333 return rewriteValueARM64_OpARM64TST_0(v) 334 case OpARM64TSTW: 335 return rewriteValueARM64_OpARM64TSTW_0(v) 336 case OpARM64TSTWconst: 337 return rewriteValueARM64_OpARM64TSTWconst_0(v) 338 case OpARM64TSTconst: 339 return rewriteValueARM64_OpARM64TSTconst_0(v) 340 case OpARM64TSTshiftLL: 341 return rewriteValueARM64_OpARM64TSTshiftLL_0(v) 342 case OpARM64TSTshiftRA: 343 return rewriteValueARM64_OpARM64TSTshiftRA_0(v) 344 case OpARM64TSTshiftRL: 345 return rewriteValueARM64_OpARM64TSTshiftRL_0(v) 346 case OpARM64UBFIZ: 347 return rewriteValueARM64_OpARM64UBFIZ_0(v) 348 case OpARM64UBFX: 349 return rewriteValueARM64_OpARM64UBFX_0(v) 350 case OpARM64UDIV: 351 return rewriteValueARM64_OpARM64UDIV_0(v) 352 case OpARM64UDIVW: 353 return rewriteValueARM64_OpARM64UDIVW_0(v) 354 case OpARM64UMOD: 355 return rewriteValueARM64_OpARM64UMOD_0(v) 356 case OpARM64UMODW: 357 return rewriteValueARM64_OpARM64UMODW_0(v) 358 case OpARM64XOR: 359 return rewriteValueARM64_OpARM64XOR_0(v) || rewriteValueARM64_OpARM64XOR_10(v) 360 case OpARM64XORconst: 361 return rewriteValueARM64_OpARM64XORconst_0(v) 362 case OpARM64XORshiftLL: 363 return rewriteValueARM64_OpARM64XORshiftLL_0(v) 364 case OpARM64XORshiftRA: 365 return rewriteValueARM64_OpARM64XORshiftRA_0(v) 366 case OpARM64XORshiftRL: 367 return rewriteValueARM64_OpARM64XORshiftRL_0(v) 368 case OpAbs: 369 return rewriteValueARM64_OpAbs_0(v) 370 case OpAdd16: 371 return rewriteValueARM64_OpAdd16_0(v) 372 case OpAdd32: 373 return rewriteValueARM64_OpAdd32_0(v) 374 case OpAdd32F: 375 return rewriteValueARM64_OpAdd32F_0(v) 376 case 
OpAdd64: 377 return rewriteValueARM64_OpAdd64_0(v) 378 case OpAdd64F: 379 return rewriteValueARM64_OpAdd64F_0(v) 380 case OpAdd8: 381 return rewriteValueARM64_OpAdd8_0(v) 382 case OpAddPtr: 383 return rewriteValueARM64_OpAddPtr_0(v) 384 case OpAddr: 385 return rewriteValueARM64_OpAddr_0(v) 386 case OpAnd16: 387 return rewriteValueARM64_OpAnd16_0(v) 388 case OpAnd32: 389 return rewriteValueARM64_OpAnd32_0(v) 390 case OpAnd64: 391 return rewriteValueARM64_OpAnd64_0(v) 392 case OpAnd8: 393 return rewriteValueARM64_OpAnd8_0(v) 394 case OpAndB: 395 return rewriteValueARM64_OpAndB_0(v) 396 case OpAtomicAdd32: 397 return rewriteValueARM64_OpAtomicAdd32_0(v) 398 case OpAtomicAdd32Variant: 399 return rewriteValueARM64_OpAtomicAdd32Variant_0(v) 400 case OpAtomicAdd64: 401 return rewriteValueARM64_OpAtomicAdd64_0(v) 402 case OpAtomicAdd64Variant: 403 return rewriteValueARM64_OpAtomicAdd64Variant_0(v) 404 case OpAtomicAnd8: 405 return rewriteValueARM64_OpAtomicAnd8_0(v) 406 case OpAtomicCompareAndSwap32: 407 return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v) 408 case OpAtomicCompareAndSwap64: 409 return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v) 410 case OpAtomicExchange32: 411 return rewriteValueARM64_OpAtomicExchange32_0(v) 412 case OpAtomicExchange64: 413 return rewriteValueARM64_OpAtomicExchange64_0(v) 414 case OpAtomicLoad32: 415 return rewriteValueARM64_OpAtomicLoad32_0(v) 416 case OpAtomicLoad64: 417 return rewriteValueARM64_OpAtomicLoad64_0(v) 418 case OpAtomicLoadPtr: 419 return rewriteValueARM64_OpAtomicLoadPtr_0(v) 420 case OpAtomicOr8: 421 return rewriteValueARM64_OpAtomicOr8_0(v) 422 case OpAtomicStore32: 423 return rewriteValueARM64_OpAtomicStore32_0(v) 424 case OpAtomicStore64: 425 return rewriteValueARM64_OpAtomicStore64_0(v) 426 case OpAtomicStorePtrNoWB: 427 return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v) 428 case OpAvg64u: 429 return rewriteValueARM64_OpAvg64u_0(v) 430 case OpBitLen64: 431 return rewriteValueARM64_OpBitLen64_0(v) 432 case 
OpBitRev16: 433 return rewriteValueARM64_OpBitRev16_0(v) 434 case OpBitRev32: 435 return rewriteValueARM64_OpBitRev32_0(v) 436 case OpBitRev64: 437 return rewriteValueARM64_OpBitRev64_0(v) 438 case OpBitRev8: 439 return rewriteValueARM64_OpBitRev8_0(v) 440 case OpBswap32: 441 return rewriteValueARM64_OpBswap32_0(v) 442 case OpBswap64: 443 return rewriteValueARM64_OpBswap64_0(v) 444 case OpCeil: 445 return rewriteValueARM64_OpCeil_0(v) 446 case OpClosureCall: 447 return rewriteValueARM64_OpClosureCall_0(v) 448 case OpCom16: 449 return rewriteValueARM64_OpCom16_0(v) 450 case OpCom32: 451 return rewriteValueARM64_OpCom32_0(v) 452 case OpCom64: 453 return rewriteValueARM64_OpCom64_0(v) 454 case OpCom8: 455 return rewriteValueARM64_OpCom8_0(v) 456 case OpCondSelect: 457 return rewriteValueARM64_OpCondSelect_0(v) 458 case OpConst16: 459 return rewriteValueARM64_OpConst16_0(v) 460 case OpConst32: 461 return rewriteValueARM64_OpConst32_0(v) 462 case OpConst32F: 463 return rewriteValueARM64_OpConst32F_0(v) 464 case OpConst64: 465 return rewriteValueARM64_OpConst64_0(v) 466 case OpConst64F: 467 return rewriteValueARM64_OpConst64F_0(v) 468 case OpConst8: 469 return rewriteValueARM64_OpConst8_0(v) 470 case OpConstBool: 471 return rewriteValueARM64_OpConstBool_0(v) 472 case OpConstNil: 473 return rewriteValueARM64_OpConstNil_0(v) 474 case OpCtz32: 475 return rewriteValueARM64_OpCtz32_0(v) 476 case OpCtz32NonZero: 477 return rewriteValueARM64_OpCtz32NonZero_0(v) 478 case OpCtz64: 479 return rewriteValueARM64_OpCtz64_0(v) 480 case OpCtz64NonZero: 481 return rewriteValueARM64_OpCtz64NonZero_0(v) 482 case OpCvt32Fto32: 483 return rewriteValueARM64_OpCvt32Fto32_0(v) 484 case OpCvt32Fto32U: 485 return rewriteValueARM64_OpCvt32Fto32U_0(v) 486 case OpCvt32Fto64: 487 return rewriteValueARM64_OpCvt32Fto64_0(v) 488 case OpCvt32Fto64F: 489 return rewriteValueARM64_OpCvt32Fto64F_0(v) 490 case OpCvt32Fto64U: 491 return rewriteValueARM64_OpCvt32Fto64U_0(v) 492 case OpCvt32Uto32F: 493 return 
rewriteValueARM64_OpCvt32Uto32F_0(v) 494 case OpCvt32Uto64F: 495 return rewriteValueARM64_OpCvt32Uto64F_0(v) 496 case OpCvt32to32F: 497 return rewriteValueARM64_OpCvt32to32F_0(v) 498 case OpCvt32to64F: 499 return rewriteValueARM64_OpCvt32to64F_0(v) 500 case OpCvt64Fto32: 501 return rewriteValueARM64_OpCvt64Fto32_0(v) 502 case OpCvt64Fto32F: 503 return rewriteValueARM64_OpCvt64Fto32F_0(v) 504 case OpCvt64Fto32U: 505 return rewriteValueARM64_OpCvt64Fto32U_0(v) 506 case OpCvt64Fto64: 507 return rewriteValueARM64_OpCvt64Fto64_0(v) 508 case OpCvt64Fto64U: 509 return rewriteValueARM64_OpCvt64Fto64U_0(v) 510 case OpCvt64Uto32F: 511 return rewriteValueARM64_OpCvt64Uto32F_0(v) 512 case OpCvt64Uto64F: 513 return rewriteValueARM64_OpCvt64Uto64F_0(v) 514 case OpCvt64to32F: 515 return rewriteValueARM64_OpCvt64to32F_0(v) 516 case OpCvt64to64F: 517 return rewriteValueARM64_OpCvt64to64F_0(v) 518 case OpDiv16: 519 return rewriteValueARM64_OpDiv16_0(v) 520 case OpDiv16u: 521 return rewriteValueARM64_OpDiv16u_0(v) 522 case OpDiv32: 523 return rewriteValueARM64_OpDiv32_0(v) 524 case OpDiv32F: 525 return rewriteValueARM64_OpDiv32F_0(v) 526 case OpDiv32u: 527 return rewriteValueARM64_OpDiv32u_0(v) 528 case OpDiv64: 529 return rewriteValueARM64_OpDiv64_0(v) 530 case OpDiv64F: 531 return rewriteValueARM64_OpDiv64F_0(v) 532 case OpDiv64u: 533 return rewriteValueARM64_OpDiv64u_0(v) 534 case OpDiv8: 535 return rewriteValueARM64_OpDiv8_0(v) 536 case OpDiv8u: 537 return rewriteValueARM64_OpDiv8u_0(v) 538 case OpEq16: 539 return rewriteValueARM64_OpEq16_0(v) 540 case OpEq32: 541 return rewriteValueARM64_OpEq32_0(v) 542 case OpEq32F: 543 return rewriteValueARM64_OpEq32F_0(v) 544 case OpEq64: 545 return rewriteValueARM64_OpEq64_0(v) 546 case OpEq64F: 547 return rewriteValueARM64_OpEq64F_0(v) 548 case OpEq8: 549 return rewriteValueARM64_OpEq8_0(v) 550 case OpEqB: 551 return rewriteValueARM64_OpEqB_0(v) 552 case OpEqPtr: 553 return rewriteValueARM64_OpEqPtr_0(v) 554 case OpFloor: 555 return 
rewriteValueARM64_OpFloor_0(v) 556 case OpGeq16: 557 return rewriteValueARM64_OpGeq16_0(v) 558 case OpGeq16U: 559 return rewriteValueARM64_OpGeq16U_0(v) 560 case OpGeq32: 561 return rewriteValueARM64_OpGeq32_0(v) 562 case OpGeq32F: 563 return rewriteValueARM64_OpGeq32F_0(v) 564 case OpGeq32U: 565 return rewriteValueARM64_OpGeq32U_0(v) 566 case OpGeq64: 567 return rewriteValueARM64_OpGeq64_0(v) 568 case OpGeq64F: 569 return rewriteValueARM64_OpGeq64F_0(v) 570 case OpGeq64U: 571 return rewriteValueARM64_OpGeq64U_0(v) 572 case OpGeq8: 573 return rewriteValueARM64_OpGeq8_0(v) 574 case OpGeq8U: 575 return rewriteValueARM64_OpGeq8U_0(v) 576 case OpGetCallerPC: 577 return rewriteValueARM64_OpGetCallerPC_0(v) 578 case OpGetCallerSP: 579 return rewriteValueARM64_OpGetCallerSP_0(v) 580 case OpGetClosurePtr: 581 return rewriteValueARM64_OpGetClosurePtr_0(v) 582 case OpGreater16: 583 return rewriteValueARM64_OpGreater16_0(v) 584 case OpGreater16U: 585 return rewriteValueARM64_OpGreater16U_0(v) 586 case OpGreater32: 587 return rewriteValueARM64_OpGreater32_0(v) 588 case OpGreater32F: 589 return rewriteValueARM64_OpGreater32F_0(v) 590 case OpGreater32U: 591 return rewriteValueARM64_OpGreater32U_0(v) 592 case OpGreater64: 593 return rewriteValueARM64_OpGreater64_0(v) 594 case OpGreater64F: 595 return rewriteValueARM64_OpGreater64F_0(v) 596 case OpGreater64U: 597 return rewriteValueARM64_OpGreater64U_0(v) 598 case OpGreater8: 599 return rewriteValueARM64_OpGreater8_0(v) 600 case OpGreater8U: 601 return rewriteValueARM64_OpGreater8U_0(v) 602 case OpHmul32: 603 return rewriteValueARM64_OpHmul32_0(v) 604 case OpHmul32u: 605 return rewriteValueARM64_OpHmul32u_0(v) 606 case OpHmul64: 607 return rewriteValueARM64_OpHmul64_0(v) 608 case OpHmul64u: 609 return rewriteValueARM64_OpHmul64u_0(v) 610 case OpInterCall: 611 return rewriteValueARM64_OpInterCall_0(v) 612 case OpIsInBounds: 613 return rewriteValueARM64_OpIsInBounds_0(v) 614 case OpIsNonNil: 615 return 
rewriteValueARM64_OpIsNonNil_0(v) 616 case OpIsSliceInBounds: 617 return rewriteValueARM64_OpIsSliceInBounds_0(v) 618 case OpLeq16: 619 return rewriteValueARM64_OpLeq16_0(v) 620 case OpLeq16U: 621 return rewriteValueARM64_OpLeq16U_0(v) 622 case OpLeq32: 623 return rewriteValueARM64_OpLeq32_0(v) 624 case OpLeq32F: 625 return rewriteValueARM64_OpLeq32F_0(v) 626 case OpLeq32U: 627 return rewriteValueARM64_OpLeq32U_0(v) 628 case OpLeq64: 629 return rewriteValueARM64_OpLeq64_0(v) 630 case OpLeq64F: 631 return rewriteValueARM64_OpLeq64F_0(v) 632 case OpLeq64U: 633 return rewriteValueARM64_OpLeq64U_0(v) 634 case OpLeq8: 635 return rewriteValueARM64_OpLeq8_0(v) 636 case OpLeq8U: 637 return rewriteValueARM64_OpLeq8U_0(v) 638 case OpLess16: 639 return rewriteValueARM64_OpLess16_0(v) 640 case OpLess16U: 641 return rewriteValueARM64_OpLess16U_0(v) 642 case OpLess32: 643 return rewriteValueARM64_OpLess32_0(v) 644 case OpLess32F: 645 return rewriteValueARM64_OpLess32F_0(v) 646 case OpLess32U: 647 return rewriteValueARM64_OpLess32U_0(v) 648 case OpLess64: 649 return rewriteValueARM64_OpLess64_0(v) 650 case OpLess64F: 651 return rewriteValueARM64_OpLess64F_0(v) 652 case OpLess64U: 653 return rewriteValueARM64_OpLess64U_0(v) 654 case OpLess8: 655 return rewriteValueARM64_OpLess8_0(v) 656 case OpLess8U: 657 return rewriteValueARM64_OpLess8U_0(v) 658 case OpLoad: 659 return rewriteValueARM64_OpLoad_0(v) 660 case OpLocalAddr: 661 return rewriteValueARM64_OpLocalAddr_0(v) 662 case OpLsh16x16: 663 return rewriteValueARM64_OpLsh16x16_0(v) 664 case OpLsh16x32: 665 return rewriteValueARM64_OpLsh16x32_0(v) 666 case OpLsh16x64: 667 return rewriteValueARM64_OpLsh16x64_0(v) 668 case OpLsh16x8: 669 return rewriteValueARM64_OpLsh16x8_0(v) 670 case OpLsh32x16: 671 return rewriteValueARM64_OpLsh32x16_0(v) 672 case OpLsh32x32: 673 return rewriteValueARM64_OpLsh32x32_0(v) 674 case OpLsh32x64: 675 return rewriteValueARM64_OpLsh32x64_0(v) 676 case OpLsh32x8: 677 return rewriteValueARM64_OpLsh32x8_0(v) 
678 case OpLsh64x16: 679 return rewriteValueARM64_OpLsh64x16_0(v) 680 case OpLsh64x32: 681 return rewriteValueARM64_OpLsh64x32_0(v) 682 case OpLsh64x64: 683 return rewriteValueARM64_OpLsh64x64_0(v) 684 case OpLsh64x8: 685 return rewriteValueARM64_OpLsh64x8_0(v) 686 case OpLsh8x16: 687 return rewriteValueARM64_OpLsh8x16_0(v) 688 case OpLsh8x32: 689 return rewriteValueARM64_OpLsh8x32_0(v) 690 case OpLsh8x64: 691 return rewriteValueARM64_OpLsh8x64_0(v) 692 case OpLsh8x8: 693 return rewriteValueARM64_OpLsh8x8_0(v) 694 case OpMod16: 695 return rewriteValueARM64_OpMod16_0(v) 696 case OpMod16u: 697 return rewriteValueARM64_OpMod16u_0(v) 698 case OpMod32: 699 return rewriteValueARM64_OpMod32_0(v) 700 case OpMod32u: 701 return rewriteValueARM64_OpMod32u_0(v) 702 case OpMod64: 703 return rewriteValueARM64_OpMod64_0(v) 704 case OpMod64u: 705 return rewriteValueARM64_OpMod64u_0(v) 706 case OpMod8: 707 return rewriteValueARM64_OpMod8_0(v) 708 case OpMod8u: 709 return rewriteValueARM64_OpMod8u_0(v) 710 case OpMove: 711 return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v) 712 case OpMul16: 713 return rewriteValueARM64_OpMul16_0(v) 714 case OpMul32: 715 return rewriteValueARM64_OpMul32_0(v) 716 case OpMul32F: 717 return rewriteValueARM64_OpMul32F_0(v) 718 case OpMul64: 719 return rewriteValueARM64_OpMul64_0(v) 720 case OpMul64F: 721 return rewriteValueARM64_OpMul64F_0(v) 722 case OpMul64uhilo: 723 return rewriteValueARM64_OpMul64uhilo_0(v) 724 case OpMul8: 725 return rewriteValueARM64_OpMul8_0(v) 726 case OpNeg16: 727 return rewriteValueARM64_OpNeg16_0(v) 728 case OpNeg32: 729 return rewriteValueARM64_OpNeg32_0(v) 730 case OpNeg32F: 731 return rewriteValueARM64_OpNeg32F_0(v) 732 case OpNeg64: 733 return rewriteValueARM64_OpNeg64_0(v) 734 case OpNeg64F: 735 return rewriteValueARM64_OpNeg64F_0(v) 736 case OpNeg8: 737 return rewriteValueARM64_OpNeg8_0(v) 738 case OpNeq16: 739 return rewriteValueARM64_OpNeq16_0(v) 740 case OpNeq32: 741 return 
rewriteValueARM64_OpNeq32_0(v) 742 case OpNeq32F: 743 return rewriteValueARM64_OpNeq32F_0(v) 744 case OpNeq64: 745 return rewriteValueARM64_OpNeq64_0(v) 746 case OpNeq64F: 747 return rewriteValueARM64_OpNeq64F_0(v) 748 case OpNeq8: 749 return rewriteValueARM64_OpNeq8_0(v) 750 case OpNeqB: 751 return rewriteValueARM64_OpNeqB_0(v) 752 case OpNeqPtr: 753 return rewriteValueARM64_OpNeqPtr_0(v) 754 case OpNilCheck: 755 return rewriteValueARM64_OpNilCheck_0(v) 756 case OpNot: 757 return rewriteValueARM64_OpNot_0(v) 758 case OpOffPtr: 759 return rewriteValueARM64_OpOffPtr_0(v) 760 case OpOr16: 761 return rewriteValueARM64_OpOr16_0(v) 762 case OpOr32: 763 return rewriteValueARM64_OpOr32_0(v) 764 case OpOr64: 765 return rewriteValueARM64_OpOr64_0(v) 766 case OpOr8: 767 return rewriteValueARM64_OpOr8_0(v) 768 case OpOrB: 769 return rewriteValueARM64_OpOrB_0(v) 770 case OpPopCount16: 771 return rewriteValueARM64_OpPopCount16_0(v) 772 case OpPopCount32: 773 return rewriteValueARM64_OpPopCount32_0(v) 774 case OpPopCount64: 775 return rewriteValueARM64_OpPopCount64_0(v) 776 case OpRotateLeft32: 777 return rewriteValueARM64_OpRotateLeft32_0(v) 778 case OpRotateLeft64: 779 return rewriteValueARM64_OpRotateLeft64_0(v) 780 case OpRound: 781 return rewriteValueARM64_OpRound_0(v) 782 case OpRound32F: 783 return rewriteValueARM64_OpRound32F_0(v) 784 case OpRound64F: 785 return rewriteValueARM64_OpRound64F_0(v) 786 case OpRoundToEven: 787 return rewriteValueARM64_OpRoundToEven_0(v) 788 case OpRsh16Ux16: 789 return rewriteValueARM64_OpRsh16Ux16_0(v) 790 case OpRsh16Ux32: 791 return rewriteValueARM64_OpRsh16Ux32_0(v) 792 case OpRsh16Ux64: 793 return rewriteValueARM64_OpRsh16Ux64_0(v) 794 case OpRsh16Ux8: 795 return rewriteValueARM64_OpRsh16Ux8_0(v) 796 case OpRsh16x16: 797 return rewriteValueARM64_OpRsh16x16_0(v) 798 case OpRsh16x32: 799 return rewriteValueARM64_OpRsh16x32_0(v) 800 case OpRsh16x64: 801 return rewriteValueARM64_OpRsh16x64_0(v) 802 case OpRsh16x8: 803 return 
rewriteValueARM64_OpRsh16x8_0(v) 804 case OpRsh32Ux16: 805 return rewriteValueARM64_OpRsh32Ux16_0(v) 806 case OpRsh32Ux32: 807 return rewriteValueARM64_OpRsh32Ux32_0(v) 808 case OpRsh32Ux64: 809 return rewriteValueARM64_OpRsh32Ux64_0(v) 810 case OpRsh32Ux8: 811 return rewriteValueARM64_OpRsh32Ux8_0(v) 812 case OpRsh32x16: 813 return rewriteValueARM64_OpRsh32x16_0(v) 814 case OpRsh32x32: 815 return rewriteValueARM64_OpRsh32x32_0(v) 816 case OpRsh32x64: 817 return rewriteValueARM64_OpRsh32x64_0(v) 818 case OpRsh32x8: 819 return rewriteValueARM64_OpRsh32x8_0(v) 820 case OpRsh64Ux16: 821 return rewriteValueARM64_OpRsh64Ux16_0(v) 822 case OpRsh64Ux32: 823 return rewriteValueARM64_OpRsh64Ux32_0(v) 824 case OpRsh64Ux64: 825 return rewriteValueARM64_OpRsh64Ux64_0(v) 826 case OpRsh64Ux8: 827 return rewriteValueARM64_OpRsh64Ux8_0(v) 828 case OpRsh64x16: 829 return rewriteValueARM64_OpRsh64x16_0(v) 830 case OpRsh64x32: 831 return rewriteValueARM64_OpRsh64x32_0(v) 832 case OpRsh64x64: 833 return rewriteValueARM64_OpRsh64x64_0(v) 834 case OpRsh64x8: 835 return rewriteValueARM64_OpRsh64x8_0(v) 836 case OpRsh8Ux16: 837 return rewriteValueARM64_OpRsh8Ux16_0(v) 838 case OpRsh8Ux32: 839 return rewriteValueARM64_OpRsh8Ux32_0(v) 840 case OpRsh8Ux64: 841 return rewriteValueARM64_OpRsh8Ux64_0(v) 842 case OpRsh8Ux8: 843 return rewriteValueARM64_OpRsh8Ux8_0(v) 844 case OpRsh8x16: 845 return rewriteValueARM64_OpRsh8x16_0(v) 846 case OpRsh8x32: 847 return rewriteValueARM64_OpRsh8x32_0(v) 848 case OpRsh8x64: 849 return rewriteValueARM64_OpRsh8x64_0(v) 850 case OpRsh8x8: 851 return rewriteValueARM64_OpRsh8x8_0(v) 852 case OpSignExt16to32: 853 return rewriteValueARM64_OpSignExt16to32_0(v) 854 case OpSignExt16to64: 855 return rewriteValueARM64_OpSignExt16to64_0(v) 856 case OpSignExt32to64: 857 return rewriteValueARM64_OpSignExt32to64_0(v) 858 case OpSignExt8to16: 859 return rewriteValueARM64_OpSignExt8to16_0(v) 860 case OpSignExt8to32: 861 return rewriteValueARM64_OpSignExt8to32_0(v) 862 case 
OpSignExt8to64: 863 return rewriteValueARM64_OpSignExt8to64_0(v) 864 case OpSlicemask: 865 return rewriteValueARM64_OpSlicemask_0(v) 866 case OpSqrt: 867 return rewriteValueARM64_OpSqrt_0(v) 868 case OpStaticCall: 869 return rewriteValueARM64_OpStaticCall_0(v) 870 case OpStore: 871 return rewriteValueARM64_OpStore_0(v) 872 case OpSub16: 873 return rewriteValueARM64_OpSub16_0(v) 874 case OpSub32: 875 return rewriteValueARM64_OpSub32_0(v) 876 case OpSub32F: 877 return rewriteValueARM64_OpSub32F_0(v) 878 case OpSub64: 879 return rewriteValueARM64_OpSub64_0(v) 880 case OpSub64F: 881 return rewriteValueARM64_OpSub64F_0(v) 882 case OpSub8: 883 return rewriteValueARM64_OpSub8_0(v) 884 case OpSubPtr: 885 return rewriteValueARM64_OpSubPtr_0(v) 886 case OpTrunc: 887 return rewriteValueARM64_OpTrunc_0(v) 888 case OpTrunc16to8: 889 return rewriteValueARM64_OpTrunc16to8_0(v) 890 case OpTrunc32to16: 891 return rewriteValueARM64_OpTrunc32to16_0(v) 892 case OpTrunc32to8: 893 return rewriteValueARM64_OpTrunc32to8_0(v) 894 case OpTrunc64to16: 895 return rewriteValueARM64_OpTrunc64to16_0(v) 896 case OpTrunc64to32: 897 return rewriteValueARM64_OpTrunc64to32_0(v) 898 case OpTrunc64to8: 899 return rewriteValueARM64_OpTrunc64to8_0(v) 900 case OpWB: 901 return rewriteValueARM64_OpWB_0(v) 902 case OpXor16: 903 return rewriteValueARM64_OpXor16_0(v) 904 case OpXor32: 905 return rewriteValueARM64_OpXor32_0(v) 906 case OpXor64: 907 return rewriteValueARM64_OpXor64_0(v) 908 case OpXor8: 909 return rewriteValueARM64_OpXor8_0(v) 910 case OpZero: 911 return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v) 912 case OpZeroExt16to32: 913 return rewriteValueARM64_OpZeroExt16to32_0(v) 914 case OpZeroExt16to64: 915 return rewriteValueARM64_OpZeroExt16to64_0(v) 916 case OpZeroExt32to64: 917 return rewriteValueARM64_OpZeroExt32to64_0(v) 918 case OpZeroExt8to16: 919 return rewriteValueARM64_OpZeroExt8to16_0(v) 920 case OpZeroExt8to32: 921 return 
rewriteValueARM64_OpZeroExt8to32_0(v) 922 case OpZeroExt8to64: 923 return rewriteValueARM64_OpZeroExt8to64_0(v) 924 } 925 return false 926 } 927 func rewriteValueARM64_OpARM64ADD_0(v *Value) bool { 928 // match: (ADD x (MOVDconst [c])) 929 // cond: 930 // result: (ADDconst [c] x) 931 for { 932 _ = v.Args[1] 933 x := v.Args[0] 934 v_1 := v.Args[1] 935 if v_1.Op != OpARM64MOVDconst { 936 break 937 } 938 c := v_1.AuxInt 939 v.reset(OpARM64ADDconst) 940 v.AuxInt = c 941 v.AddArg(x) 942 return true 943 } 944 // match: (ADD (MOVDconst [c]) x) 945 // cond: 946 // result: (ADDconst [c] x) 947 for { 948 _ = v.Args[1] 949 v_0 := v.Args[0] 950 if v_0.Op != OpARM64MOVDconst { 951 break 952 } 953 c := v_0.AuxInt 954 x := v.Args[1] 955 v.reset(OpARM64ADDconst) 956 v.AuxInt = c 957 v.AddArg(x) 958 return true 959 } 960 // match: (ADD a l:(MUL x y)) 961 // cond: l.Uses==1 && clobber(l) 962 // result: (MADD a x y) 963 for { 964 _ = v.Args[1] 965 a := v.Args[0] 966 l := v.Args[1] 967 if l.Op != OpARM64MUL { 968 break 969 } 970 _ = l.Args[1] 971 x := l.Args[0] 972 y := l.Args[1] 973 if !(l.Uses == 1 && clobber(l)) { 974 break 975 } 976 v.reset(OpARM64MADD) 977 v.AddArg(a) 978 v.AddArg(x) 979 v.AddArg(y) 980 return true 981 } 982 // match: (ADD l:(MUL x y) a) 983 // cond: l.Uses==1 && clobber(l) 984 // result: (MADD a x y) 985 for { 986 _ = v.Args[1] 987 l := v.Args[0] 988 if l.Op != OpARM64MUL { 989 break 990 } 991 _ = l.Args[1] 992 x := l.Args[0] 993 y := l.Args[1] 994 a := v.Args[1] 995 if !(l.Uses == 1 && clobber(l)) { 996 break 997 } 998 v.reset(OpARM64MADD) 999 v.AddArg(a) 1000 v.AddArg(x) 1001 v.AddArg(y) 1002 return true 1003 } 1004 // match: (ADD a l:(MNEG x y)) 1005 // cond: l.Uses==1 && clobber(l) 1006 // result: (MSUB a x y) 1007 for { 1008 _ = v.Args[1] 1009 a := v.Args[0] 1010 l := v.Args[1] 1011 if l.Op != OpARM64MNEG { 1012 break 1013 } 1014 _ = l.Args[1] 1015 x := l.Args[0] 1016 y := l.Args[1] 1017 if !(l.Uses == 1 && clobber(l)) { 1018 break 1019 } 1020 
v.reset(OpARM64MSUB) 1021 v.AddArg(a) 1022 v.AddArg(x) 1023 v.AddArg(y) 1024 return true 1025 } 1026 // match: (ADD l:(MNEG x y) a) 1027 // cond: l.Uses==1 && clobber(l) 1028 // result: (MSUB a x y) 1029 for { 1030 _ = v.Args[1] 1031 l := v.Args[0] 1032 if l.Op != OpARM64MNEG { 1033 break 1034 } 1035 _ = l.Args[1] 1036 x := l.Args[0] 1037 y := l.Args[1] 1038 a := v.Args[1] 1039 if !(l.Uses == 1 && clobber(l)) { 1040 break 1041 } 1042 v.reset(OpARM64MSUB) 1043 v.AddArg(a) 1044 v.AddArg(x) 1045 v.AddArg(y) 1046 return true 1047 } 1048 // match: (ADD a l:(MULW x y)) 1049 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1050 // result: (MADDW a x y) 1051 for { 1052 _ = v.Args[1] 1053 a := v.Args[0] 1054 l := v.Args[1] 1055 if l.Op != OpARM64MULW { 1056 break 1057 } 1058 _ = l.Args[1] 1059 x := l.Args[0] 1060 y := l.Args[1] 1061 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1062 break 1063 } 1064 v.reset(OpARM64MADDW) 1065 v.AddArg(a) 1066 v.AddArg(x) 1067 v.AddArg(y) 1068 return true 1069 } 1070 // match: (ADD l:(MULW x y) a) 1071 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1072 // result: (MADDW a x y) 1073 for { 1074 _ = v.Args[1] 1075 l := v.Args[0] 1076 if l.Op != OpARM64MULW { 1077 break 1078 } 1079 _ = l.Args[1] 1080 x := l.Args[0] 1081 y := l.Args[1] 1082 a := v.Args[1] 1083 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1084 break 1085 } 1086 v.reset(OpARM64MADDW) 1087 v.AddArg(a) 1088 v.AddArg(x) 1089 v.AddArg(y) 1090 return true 1091 } 1092 // match: (ADD a l:(MNEGW x y)) 1093 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1094 // result: (MSUBW a x y) 1095 for { 1096 _ = v.Args[1] 1097 a := v.Args[0] 1098 l := v.Args[1] 1099 if l.Op != OpARM64MNEGW { 1100 break 1101 } 1102 _ = l.Args[1] 1103 x := l.Args[0] 1104 y := l.Args[1] 1105 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1106 break 1107 } 1108 v.reset(OpARM64MSUBW) 1109 v.AddArg(a) 1110 v.AddArg(x) 1111 v.AddArg(y) 1112 return true 1113 } 1114 // 
match: (ADD l:(MNEGW x y) a) 1115 // cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l) 1116 // result: (MSUBW a x y) 1117 for { 1118 _ = v.Args[1] 1119 l := v.Args[0] 1120 if l.Op != OpARM64MNEGW { 1121 break 1122 } 1123 _ = l.Args[1] 1124 x := l.Args[0] 1125 y := l.Args[1] 1126 a := v.Args[1] 1127 if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) { 1128 break 1129 } 1130 v.reset(OpARM64MSUBW) 1131 v.AddArg(a) 1132 v.AddArg(x) 1133 v.AddArg(y) 1134 return true 1135 } 1136 return false 1137 } 1138 func rewriteValueARM64_OpARM64ADD_10(v *Value) bool { 1139 b := v.Block 1140 _ = b 1141 typ := &b.Func.Config.Types 1142 _ = typ 1143 // match: (ADD x (NEG y)) 1144 // cond: 1145 // result: (SUB x y) 1146 for { 1147 _ = v.Args[1] 1148 x := v.Args[0] 1149 v_1 := v.Args[1] 1150 if v_1.Op != OpARM64NEG { 1151 break 1152 } 1153 y := v_1.Args[0] 1154 v.reset(OpARM64SUB) 1155 v.AddArg(x) 1156 v.AddArg(y) 1157 return true 1158 } 1159 // match: (ADD (NEG y) x) 1160 // cond: 1161 // result: (SUB x y) 1162 for { 1163 _ = v.Args[1] 1164 v_0 := v.Args[0] 1165 if v_0.Op != OpARM64NEG { 1166 break 1167 } 1168 y := v_0.Args[0] 1169 x := v.Args[1] 1170 v.reset(OpARM64SUB) 1171 v.AddArg(x) 1172 v.AddArg(y) 1173 return true 1174 } 1175 // match: (ADD x0 x1:(SLLconst [c] y)) 1176 // cond: clobberIfDead(x1) 1177 // result: (ADDshiftLL x0 y [c]) 1178 for { 1179 _ = v.Args[1] 1180 x0 := v.Args[0] 1181 x1 := v.Args[1] 1182 if x1.Op != OpARM64SLLconst { 1183 break 1184 } 1185 c := x1.AuxInt 1186 y := x1.Args[0] 1187 if !(clobberIfDead(x1)) { 1188 break 1189 } 1190 v.reset(OpARM64ADDshiftLL) 1191 v.AuxInt = c 1192 v.AddArg(x0) 1193 v.AddArg(y) 1194 return true 1195 } 1196 // match: (ADD x1:(SLLconst [c] y) x0) 1197 // cond: clobberIfDead(x1) 1198 // result: (ADDshiftLL x0 y [c]) 1199 for { 1200 _ = v.Args[1] 1201 x1 := v.Args[0] 1202 if x1.Op != OpARM64SLLconst { 1203 break 1204 } 1205 c := x1.AuxInt 1206 y := x1.Args[0] 1207 x0 := v.Args[1] 1208 if !(clobberIfDead(x1)) { 1209 break 1210 } 
1211 v.reset(OpARM64ADDshiftLL) 1212 v.AuxInt = c 1213 v.AddArg(x0) 1214 v.AddArg(y) 1215 return true 1216 } 1217 // match: (ADD x0 x1:(SRLconst [c] y)) 1218 // cond: clobberIfDead(x1) 1219 // result: (ADDshiftRL x0 y [c]) 1220 for { 1221 _ = v.Args[1] 1222 x0 := v.Args[0] 1223 x1 := v.Args[1] 1224 if x1.Op != OpARM64SRLconst { 1225 break 1226 } 1227 c := x1.AuxInt 1228 y := x1.Args[0] 1229 if !(clobberIfDead(x1)) { 1230 break 1231 } 1232 v.reset(OpARM64ADDshiftRL) 1233 v.AuxInt = c 1234 v.AddArg(x0) 1235 v.AddArg(y) 1236 return true 1237 } 1238 // match: (ADD x1:(SRLconst [c] y) x0) 1239 // cond: clobberIfDead(x1) 1240 // result: (ADDshiftRL x0 y [c]) 1241 for { 1242 _ = v.Args[1] 1243 x1 := v.Args[0] 1244 if x1.Op != OpARM64SRLconst { 1245 break 1246 } 1247 c := x1.AuxInt 1248 y := x1.Args[0] 1249 x0 := v.Args[1] 1250 if !(clobberIfDead(x1)) { 1251 break 1252 } 1253 v.reset(OpARM64ADDshiftRL) 1254 v.AuxInt = c 1255 v.AddArg(x0) 1256 v.AddArg(y) 1257 return true 1258 } 1259 // match: (ADD x0 x1:(SRAconst [c] y)) 1260 // cond: clobberIfDead(x1) 1261 // result: (ADDshiftRA x0 y [c]) 1262 for { 1263 _ = v.Args[1] 1264 x0 := v.Args[0] 1265 x1 := v.Args[1] 1266 if x1.Op != OpARM64SRAconst { 1267 break 1268 } 1269 c := x1.AuxInt 1270 y := x1.Args[0] 1271 if !(clobberIfDead(x1)) { 1272 break 1273 } 1274 v.reset(OpARM64ADDshiftRA) 1275 v.AuxInt = c 1276 v.AddArg(x0) 1277 v.AddArg(y) 1278 return true 1279 } 1280 // match: (ADD x1:(SRAconst [c] y) x0) 1281 // cond: clobberIfDead(x1) 1282 // result: (ADDshiftRA x0 y [c]) 1283 for { 1284 _ = v.Args[1] 1285 x1 := v.Args[0] 1286 if x1.Op != OpARM64SRAconst { 1287 break 1288 } 1289 c := x1.AuxInt 1290 y := x1.Args[0] 1291 x0 := v.Args[1] 1292 if !(clobberIfDead(x1)) { 1293 break 1294 } 1295 v.reset(OpARM64ADDshiftRA) 1296 v.AuxInt = c 1297 v.AddArg(x0) 1298 v.AddArg(y) 1299 return true 1300 } 1301 // match: (ADD (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> 
[63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 1302 // cond: cc.(Op) == OpARM64LessThanU 1303 // result: (ROR x (NEG <t> y)) 1304 for { 1305 _ = v.Args[1] 1306 v_0 := v.Args[0] 1307 if v_0.Op != OpARM64SLL { 1308 break 1309 } 1310 _ = v_0.Args[1] 1311 x := v_0.Args[0] 1312 v_0_1 := v_0.Args[1] 1313 if v_0_1.Op != OpARM64ANDconst { 1314 break 1315 } 1316 t := v_0_1.Type 1317 if v_0_1.AuxInt != 63 { 1318 break 1319 } 1320 y := v_0_1.Args[0] 1321 v_1 := v.Args[1] 1322 if v_1.Op != OpARM64CSEL0 { 1323 break 1324 } 1325 if v_1.Type != typ.UInt64 { 1326 break 1327 } 1328 cc := v_1.Aux 1329 _ = v_1.Args[1] 1330 v_1_0 := v_1.Args[0] 1331 if v_1_0.Op != OpARM64SRL { 1332 break 1333 } 1334 if v_1_0.Type != typ.UInt64 { 1335 break 1336 } 1337 _ = v_1_0.Args[1] 1338 if x != v_1_0.Args[0] { 1339 break 1340 } 1341 v_1_0_1 := v_1_0.Args[1] 1342 if v_1_0_1.Op != OpARM64SUB { 1343 break 1344 } 1345 if v_1_0_1.Type != t { 1346 break 1347 } 1348 _ = v_1_0_1.Args[1] 1349 v_1_0_1_0 := v_1_0_1.Args[0] 1350 if v_1_0_1_0.Op != OpARM64MOVDconst { 1351 break 1352 } 1353 if v_1_0_1_0.AuxInt != 64 { 1354 break 1355 } 1356 v_1_0_1_1 := v_1_0_1.Args[1] 1357 if v_1_0_1_1.Op != OpARM64ANDconst { 1358 break 1359 } 1360 if v_1_0_1_1.Type != t { 1361 break 1362 } 1363 if v_1_0_1_1.AuxInt != 63 { 1364 break 1365 } 1366 if y != v_1_0_1_1.Args[0] { 1367 break 1368 } 1369 v_1_1 := v_1.Args[1] 1370 if v_1_1.Op != OpARM64CMPconst { 1371 break 1372 } 1373 if v_1_1.AuxInt != 64 { 1374 break 1375 } 1376 v_1_1_0 := v_1_1.Args[0] 1377 if v_1_1_0.Op != OpARM64SUB { 1378 break 1379 } 1380 if v_1_1_0.Type != t { 1381 break 1382 } 1383 _ = v_1_1_0.Args[1] 1384 v_1_1_0_0 := v_1_1_0.Args[0] 1385 if v_1_1_0_0.Op != OpARM64MOVDconst { 1386 break 1387 } 1388 if v_1_1_0_0.AuxInt != 64 { 1389 break 1390 } 1391 v_1_1_0_1 := v_1_1_0.Args[1] 1392 if v_1_1_0_1.Op != OpARM64ANDconst { 1393 break 1394 } 1395 if v_1_1_0_1.Type != t { 1396 break 1397 } 1398 if v_1_1_0_1.AuxInt != 63 { 1399 break 
1400 } 1401 if y != v_1_1_0_1.Args[0] { 1402 break 1403 } 1404 if !(cc.(Op) == OpARM64LessThanU) { 1405 break 1406 } 1407 v.reset(OpARM64ROR) 1408 v.AddArg(x) 1409 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1410 v0.AddArg(y) 1411 v.AddArg(v0) 1412 return true 1413 } 1414 // match: (ADD (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 1415 // cond: cc.(Op) == OpARM64LessThanU 1416 // result: (ROR x (NEG <t> y)) 1417 for { 1418 _ = v.Args[1] 1419 v_0 := v.Args[0] 1420 if v_0.Op != OpARM64CSEL0 { 1421 break 1422 } 1423 if v_0.Type != typ.UInt64 { 1424 break 1425 } 1426 cc := v_0.Aux 1427 _ = v_0.Args[1] 1428 v_0_0 := v_0.Args[0] 1429 if v_0_0.Op != OpARM64SRL { 1430 break 1431 } 1432 if v_0_0.Type != typ.UInt64 { 1433 break 1434 } 1435 _ = v_0_0.Args[1] 1436 x := v_0_0.Args[0] 1437 v_0_0_1 := v_0_0.Args[1] 1438 if v_0_0_1.Op != OpARM64SUB { 1439 break 1440 } 1441 t := v_0_0_1.Type 1442 _ = v_0_0_1.Args[1] 1443 v_0_0_1_0 := v_0_0_1.Args[0] 1444 if v_0_0_1_0.Op != OpARM64MOVDconst { 1445 break 1446 } 1447 if v_0_0_1_0.AuxInt != 64 { 1448 break 1449 } 1450 v_0_0_1_1 := v_0_0_1.Args[1] 1451 if v_0_0_1_1.Op != OpARM64ANDconst { 1452 break 1453 } 1454 if v_0_0_1_1.Type != t { 1455 break 1456 } 1457 if v_0_0_1_1.AuxInt != 63 { 1458 break 1459 } 1460 y := v_0_0_1_1.Args[0] 1461 v_0_1 := v_0.Args[1] 1462 if v_0_1.Op != OpARM64CMPconst { 1463 break 1464 } 1465 if v_0_1.AuxInt != 64 { 1466 break 1467 } 1468 v_0_1_0 := v_0_1.Args[0] 1469 if v_0_1_0.Op != OpARM64SUB { 1470 break 1471 } 1472 if v_0_1_0.Type != t { 1473 break 1474 } 1475 _ = v_0_1_0.Args[1] 1476 v_0_1_0_0 := v_0_1_0.Args[0] 1477 if v_0_1_0_0.Op != OpARM64MOVDconst { 1478 break 1479 } 1480 if v_0_1_0_0.AuxInt != 64 { 1481 break 1482 } 1483 v_0_1_0_1 := v_0_1_0.Args[1] 1484 if v_0_1_0_1.Op != OpARM64ANDconst { 1485 break 1486 } 1487 if v_0_1_0_1.Type != t { 1488 break 1489 } 1490 
if v_0_1_0_1.AuxInt != 63 { 1491 break 1492 } 1493 if y != v_0_1_0_1.Args[0] { 1494 break 1495 } 1496 v_1 := v.Args[1] 1497 if v_1.Op != OpARM64SLL { 1498 break 1499 } 1500 _ = v_1.Args[1] 1501 if x != v_1.Args[0] { 1502 break 1503 } 1504 v_1_1 := v_1.Args[1] 1505 if v_1_1.Op != OpARM64ANDconst { 1506 break 1507 } 1508 if v_1_1.Type != t { 1509 break 1510 } 1511 if v_1_1.AuxInt != 63 { 1512 break 1513 } 1514 if y != v_1_1.Args[0] { 1515 break 1516 } 1517 if !(cc.(Op) == OpARM64LessThanU) { 1518 break 1519 } 1520 v.reset(OpARM64ROR) 1521 v.AddArg(x) 1522 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1523 v0.AddArg(y) 1524 v.AddArg(v0) 1525 return true 1526 } 1527 return false 1528 } 1529 func rewriteValueARM64_OpARM64ADD_20(v *Value) bool { 1530 b := v.Block 1531 _ = b 1532 typ := &b.Func.Config.Types 1533 _ = typ 1534 // match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 1535 // cond: cc.(Op) == OpARM64LessThanU 1536 // result: (ROR x y) 1537 for { 1538 _ = v.Args[1] 1539 v_0 := v.Args[0] 1540 if v_0.Op != OpARM64SRL { 1541 break 1542 } 1543 if v_0.Type != typ.UInt64 { 1544 break 1545 } 1546 _ = v_0.Args[1] 1547 x := v_0.Args[0] 1548 v_0_1 := v_0.Args[1] 1549 if v_0_1.Op != OpARM64ANDconst { 1550 break 1551 } 1552 t := v_0_1.Type 1553 if v_0_1.AuxInt != 63 { 1554 break 1555 } 1556 y := v_0_1.Args[0] 1557 v_1 := v.Args[1] 1558 if v_1.Op != OpARM64CSEL0 { 1559 break 1560 } 1561 if v_1.Type != typ.UInt64 { 1562 break 1563 } 1564 cc := v_1.Aux 1565 _ = v_1.Args[1] 1566 v_1_0 := v_1.Args[0] 1567 if v_1_0.Op != OpARM64SLL { 1568 break 1569 } 1570 _ = v_1_0.Args[1] 1571 if x != v_1_0.Args[0] { 1572 break 1573 } 1574 v_1_0_1 := v_1_0.Args[1] 1575 if v_1_0_1.Op != OpARM64SUB { 1576 break 1577 } 1578 if v_1_0_1.Type != t { 1579 break 1580 } 1581 _ = v_1_0_1.Args[1] 1582 v_1_0_1_0 := v_1_0_1.Args[0] 1583 if v_1_0_1_0.Op != 
OpARM64MOVDconst { 1584 break 1585 } 1586 if v_1_0_1_0.AuxInt != 64 { 1587 break 1588 } 1589 v_1_0_1_1 := v_1_0_1.Args[1] 1590 if v_1_0_1_1.Op != OpARM64ANDconst { 1591 break 1592 } 1593 if v_1_0_1_1.Type != t { 1594 break 1595 } 1596 if v_1_0_1_1.AuxInt != 63 { 1597 break 1598 } 1599 if y != v_1_0_1_1.Args[0] { 1600 break 1601 } 1602 v_1_1 := v_1.Args[1] 1603 if v_1_1.Op != OpARM64CMPconst { 1604 break 1605 } 1606 if v_1_1.AuxInt != 64 { 1607 break 1608 } 1609 v_1_1_0 := v_1_1.Args[0] 1610 if v_1_1_0.Op != OpARM64SUB { 1611 break 1612 } 1613 if v_1_1_0.Type != t { 1614 break 1615 } 1616 _ = v_1_1_0.Args[1] 1617 v_1_1_0_0 := v_1_1_0.Args[0] 1618 if v_1_1_0_0.Op != OpARM64MOVDconst { 1619 break 1620 } 1621 if v_1_1_0_0.AuxInt != 64 { 1622 break 1623 } 1624 v_1_1_0_1 := v_1_1_0.Args[1] 1625 if v_1_1_0_1.Op != OpARM64ANDconst { 1626 break 1627 } 1628 if v_1_1_0_1.Type != t { 1629 break 1630 } 1631 if v_1_1_0_1.AuxInt != 63 { 1632 break 1633 } 1634 if y != v_1_1_0_1.Args[0] { 1635 break 1636 } 1637 if !(cc.(Op) == OpARM64LessThanU) { 1638 break 1639 } 1640 v.reset(OpARM64ROR) 1641 v.AddArg(x) 1642 v.AddArg(y) 1643 return true 1644 } 1645 // match: (ADD (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 1646 // cond: cc.(Op) == OpARM64LessThanU 1647 // result: (ROR x y) 1648 for { 1649 _ = v.Args[1] 1650 v_0 := v.Args[0] 1651 if v_0.Op != OpARM64CSEL0 { 1652 break 1653 } 1654 if v_0.Type != typ.UInt64 { 1655 break 1656 } 1657 cc := v_0.Aux 1658 _ = v_0.Args[1] 1659 v_0_0 := v_0.Args[0] 1660 if v_0_0.Op != OpARM64SLL { 1661 break 1662 } 1663 _ = v_0_0.Args[1] 1664 x := v_0_0.Args[0] 1665 v_0_0_1 := v_0_0.Args[1] 1666 if v_0_0_1.Op != OpARM64SUB { 1667 break 1668 } 1669 t := v_0_0_1.Type 1670 _ = v_0_0_1.Args[1] 1671 v_0_0_1_0 := v_0_0_1.Args[0] 1672 if v_0_0_1_0.Op != OpARM64MOVDconst { 1673 break 1674 } 1675 if v_0_0_1_0.AuxInt != 
64 { 1676 break 1677 } 1678 v_0_0_1_1 := v_0_0_1.Args[1] 1679 if v_0_0_1_1.Op != OpARM64ANDconst { 1680 break 1681 } 1682 if v_0_0_1_1.Type != t { 1683 break 1684 } 1685 if v_0_0_1_1.AuxInt != 63 { 1686 break 1687 } 1688 y := v_0_0_1_1.Args[0] 1689 v_0_1 := v_0.Args[1] 1690 if v_0_1.Op != OpARM64CMPconst { 1691 break 1692 } 1693 if v_0_1.AuxInt != 64 { 1694 break 1695 } 1696 v_0_1_0 := v_0_1.Args[0] 1697 if v_0_1_0.Op != OpARM64SUB { 1698 break 1699 } 1700 if v_0_1_0.Type != t { 1701 break 1702 } 1703 _ = v_0_1_0.Args[1] 1704 v_0_1_0_0 := v_0_1_0.Args[0] 1705 if v_0_1_0_0.Op != OpARM64MOVDconst { 1706 break 1707 } 1708 if v_0_1_0_0.AuxInt != 64 { 1709 break 1710 } 1711 v_0_1_0_1 := v_0_1_0.Args[1] 1712 if v_0_1_0_1.Op != OpARM64ANDconst { 1713 break 1714 } 1715 if v_0_1_0_1.Type != t { 1716 break 1717 } 1718 if v_0_1_0_1.AuxInt != 63 { 1719 break 1720 } 1721 if y != v_0_1_0_1.Args[0] { 1722 break 1723 } 1724 v_1 := v.Args[1] 1725 if v_1.Op != OpARM64SRL { 1726 break 1727 } 1728 if v_1.Type != typ.UInt64 { 1729 break 1730 } 1731 _ = v_1.Args[1] 1732 if x != v_1.Args[0] { 1733 break 1734 } 1735 v_1_1 := v_1.Args[1] 1736 if v_1_1.Op != OpARM64ANDconst { 1737 break 1738 } 1739 if v_1_1.Type != t { 1740 break 1741 } 1742 if v_1_1.AuxInt != 63 { 1743 break 1744 } 1745 if y != v_1_1.Args[0] { 1746 break 1747 } 1748 if !(cc.(Op) == OpARM64LessThanU) { 1749 break 1750 } 1751 v.reset(OpARM64ROR) 1752 v.AddArg(x) 1753 v.AddArg(y) 1754 return true 1755 } 1756 // match: (ADD (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 1757 // cond: cc.(Op) == OpARM64LessThanU 1758 // result: (RORW x (NEG <t> y)) 1759 for { 1760 _ = v.Args[1] 1761 v_0 := v.Args[0] 1762 if v_0.Op != OpARM64SLL { 1763 break 1764 } 1765 _ = v_0.Args[1] 1766 x := v_0.Args[0] 1767 v_0_1 := v_0.Args[1] 1768 if v_0_1.Op != OpARM64ANDconst { 1769 break 1770 } 1771 
t := v_0_1.Type 1772 if v_0_1.AuxInt != 31 { 1773 break 1774 } 1775 y := v_0_1.Args[0] 1776 v_1 := v.Args[1] 1777 if v_1.Op != OpARM64CSEL0 { 1778 break 1779 } 1780 if v_1.Type != typ.UInt32 { 1781 break 1782 } 1783 cc := v_1.Aux 1784 _ = v_1.Args[1] 1785 v_1_0 := v_1.Args[0] 1786 if v_1_0.Op != OpARM64SRL { 1787 break 1788 } 1789 if v_1_0.Type != typ.UInt32 { 1790 break 1791 } 1792 _ = v_1_0.Args[1] 1793 v_1_0_0 := v_1_0.Args[0] 1794 if v_1_0_0.Op != OpARM64MOVWUreg { 1795 break 1796 } 1797 if x != v_1_0_0.Args[0] { 1798 break 1799 } 1800 v_1_0_1 := v_1_0.Args[1] 1801 if v_1_0_1.Op != OpARM64SUB { 1802 break 1803 } 1804 if v_1_0_1.Type != t { 1805 break 1806 } 1807 _ = v_1_0_1.Args[1] 1808 v_1_0_1_0 := v_1_0_1.Args[0] 1809 if v_1_0_1_0.Op != OpARM64MOVDconst { 1810 break 1811 } 1812 if v_1_0_1_0.AuxInt != 32 { 1813 break 1814 } 1815 v_1_0_1_1 := v_1_0_1.Args[1] 1816 if v_1_0_1_1.Op != OpARM64ANDconst { 1817 break 1818 } 1819 if v_1_0_1_1.Type != t { 1820 break 1821 } 1822 if v_1_0_1_1.AuxInt != 31 { 1823 break 1824 } 1825 if y != v_1_0_1_1.Args[0] { 1826 break 1827 } 1828 v_1_1 := v_1.Args[1] 1829 if v_1_1.Op != OpARM64CMPconst { 1830 break 1831 } 1832 if v_1_1.AuxInt != 64 { 1833 break 1834 } 1835 v_1_1_0 := v_1_1.Args[0] 1836 if v_1_1_0.Op != OpARM64SUB { 1837 break 1838 } 1839 if v_1_1_0.Type != t { 1840 break 1841 } 1842 _ = v_1_1_0.Args[1] 1843 v_1_1_0_0 := v_1_1_0.Args[0] 1844 if v_1_1_0_0.Op != OpARM64MOVDconst { 1845 break 1846 } 1847 if v_1_1_0_0.AuxInt != 32 { 1848 break 1849 } 1850 v_1_1_0_1 := v_1_1_0.Args[1] 1851 if v_1_1_0_1.Op != OpARM64ANDconst { 1852 break 1853 } 1854 if v_1_1_0_1.Type != t { 1855 break 1856 } 1857 if v_1_1_0_1.AuxInt != 31 { 1858 break 1859 } 1860 if y != v_1_1_0_1.Args[0] { 1861 break 1862 } 1863 if !(cc.(Op) == OpARM64LessThanU) { 1864 break 1865 } 1866 v.reset(OpARM64RORW) 1867 v.AddArg(x) 1868 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1869 v0.AddArg(y) 1870 v.AddArg(v0) 1871 return true 1872 } 1873 // match: (ADD (CSEL0 
<typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 1874 // cond: cc.(Op) == OpARM64LessThanU 1875 // result: (RORW x (NEG <t> y)) 1876 for { 1877 _ = v.Args[1] 1878 v_0 := v.Args[0] 1879 if v_0.Op != OpARM64CSEL0 { 1880 break 1881 } 1882 if v_0.Type != typ.UInt32 { 1883 break 1884 } 1885 cc := v_0.Aux 1886 _ = v_0.Args[1] 1887 v_0_0 := v_0.Args[0] 1888 if v_0_0.Op != OpARM64SRL { 1889 break 1890 } 1891 if v_0_0.Type != typ.UInt32 { 1892 break 1893 } 1894 _ = v_0_0.Args[1] 1895 v_0_0_0 := v_0_0.Args[0] 1896 if v_0_0_0.Op != OpARM64MOVWUreg { 1897 break 1898 } 1899 x := v_0_0_0.Args[0] 1900 v_0_0_1 := v_0_0.Args[1] 1901 if v_0_0_1.Op != OpARM64SUB { 1902 break 1903 } 1904 t := v_0_0_1.Type 1905 _ = v_0_0_1.Args[1] 1906 v_0_0_1_0 := v_0_0_1.Args[0] 1907 if v_0_0_1_0.Op != OpARM64MOVDconst { 1908 break 1909 } 1910 if v_0_0_1_0.AuxInt != 32 { 1911 break 1912 } 1913 v_0_0_1_1 := v_0_0_1.Args[1] 1914 if v_0_0_1_1.Op != OpARM64ANDconst { 1915 break 1916 } 1917 if v_0_0_1_1.Type != t { 1918 break 1919 } 1920 if v_0_0_1_1.AuxInt != 31 { 1921 break 1922 } 1923 y := v_0_0_1_1.Args[0] 1924 v_0_1 := v_0.Args[1] 1925 if v_0_1.Op != OpARM64CMPconst { 1926 break 1927 } 1928 if v_0_1.AuxInt != 64 { 1929 break 1930 } 1931 v_0_1_0 := v_0_1.Args[0] 1932 if v_0_1_0.Op != OpARM64SUB { 1933 break 1934 } 1935 if v_0_1_0.Type != t { 1936 break 1937 } 1938 _ = v_0_1_0.Args[1] 1939 v_0_1_0_0 := v_0_1_0.Args[0] 1940 if v_0_1_0_0.Op != OpARM64MOVDconst { 1941 break 1942 } 1943 if v_0_1_0_0.AuxInt != 32 { 1944 break 1945 } 1946 v_0_1_0_1 := v_0_1_0.Args[1] 1947 if v_0_1_0_1.Op != OpARM64ANDconst { 1948 break 1949 } 1950 if v_0_1_0_1.Type != t { 1951 break 1952 } 1953 if v_0_1_0_1.AuxInt != 31 { 1954 break 1955 } 1956 if y != v_0_1_0_1.Args[0] { 1957 break 1958 } 1959 v_1 := v.Args[1] 1960 if v_1.Op != OpARM64SLL { 1961 break 1962 } 1963 _ = v_1.Args[1] 
1964 if x != v_1.Args[0] { 1965 break 1966 } 1967 v_1_1 := v_1.Args[1] 1968 if v_1_1.Op != OpARM64ANDconst { 1969 break 1970 } 1971 if v_1_1.Type != t { 1972 break 1973 } 1974 if v_1_1.AuxInt != 31 { 1975 break 1976 } 1977 if y != v_1_1.Args[0] { 1978 break 1979 } 1980 if !(cc.(Op) == OpARM64LessThanU) { 1981 break 1982 } 1983 v.reset(OpARM64RORW) 1984 v.AddArg(x) 1985 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 1986 v0.AddArg(y) 1987 v.AddArg(v0) 1988 return true 1989 } 1990 // match: (ADD (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 1991 // cond: cc.(Op) == OpARM64LessThanU 1992 // result: (RORW x y) 1993 for { 1994 _ = v.Args[1] 1995 v_0 := v.Args[0] 1996 if v_0.Op != OpARM64SRL { 1997 break 1998 } 1999 if v_0.Type != typ.UInt32 { 2000 break 2001 } 2002 _ = v_0.Args[1] 2003 v_0_0 := v_0.Args[0] 2004 if v_0_0.Op != OpARM64MOVWUreg { 2005 break 2006 } 2007 x := v_0_0.Args[0] 2008 v_0_1 := v_0.Args[1] 2009 if v_0_1.Op != OpARM64ANDconst { 2010 break 2011 } 2012 t := v_0_1.Type 2013 if v_0_1.AuxInt != 31 { 2014 break 2015 } 2016 y := v_0_1.Args[0] 2017 v_1 := v.Args[1] 2018 if v_1.Op != OpARM64CSEL0 { 2019 break 2020 } 2021 if v_1.Type != typ.UInt32 { 2022 break 2023 } 2024 cc := v_1.Aux 2025 _ = v_1.Args[1] 2026 v_1_0 := v_1.Args[0] 2027 if v_1_0.Op != OpARM64SLL { 2028 break 2029 } 2030 _ = v_1_0.Args[1] 2031 if x != v_1_0.Args[0] { 2032 break 2033 } 2034 v_1_0_1 := v_1_0.Args[1] 2035 if v_1_0_1.Op != OpARM64SUB { 2036 break 2037 } 2038 if v_1_0_1.Type != t { 2039 break 2040 } 2041 _ = v_1_0_1.Args[1] 2042 v_1_0_1_0 := v_1_0_1.Args[0] 2043 if v_1_0_1_0.Op != OpARM64MOVDconst { 2044 break 2045 } 2046 if v_1_0_1_0.AuxInt != 32 { 2047 break 2048 } 2049 v_1_0_1_1 := v_1_0_1.Args[1] 2050 if v_1_0_1_1.Op != OpARM64ANDconst { 2051 break 2052 } 2053 if v_1_0_1_1.Type != t { 2054 break 2055 } 2056 if v_1_0_1_1.AuxInt != 
31 { 2057 break 2058 } 2059 if y != v_1_0_1_1.Args[0] { 2060 break 2061 } 2062 v_1_1 := v_1.Args[1] 2063 if v_1_1.Op != OpARM64CMPconst { 2064 break 2065 } 2066 if v_1_1.AuxInt != 64 { 2067 break 2068 } 2069 v_1_1_0 := v_1_1.Args[0] 2070 if v_1_1_0.Op != OpARM64SUB { 2071 break 2072 } 2073 if v_1_1_0.Type != t { 2074 break 2075 } 2076 _ = v_1_1_0.Args[1] 2077 v_1_1_0_0 := v_1_1_0.Args[0] 2078 if v_1_1_0_0.Op != OpARM64MOVDconst { 2079 break 2080 } 2081 if v_1_1_0_0.AuxInt != 32 { 2082 break 2083 } 2084 v_1_1_0_1 := v_1_1_0.Args[1] 2085 if v_1_1_0_1.Op != OpARM64ANDconst { 2086 break 2087 } 2088 if v_1_1_0_1.Type != t { 2089 break 2090 } 2091 if v_1_1_0_1.AuxInt != 31 { 2092 break 2093 } 2094 if y != v_1_1_0_1.Args[0] { 2095 break 2096 } 2097 if !(cc.(Op) == OpARM64LessThanU) { 2098 break 2099 } 2100 v.reset(OpARM64RORW) 2101 v.AddArg(x) 2102 v.AddArg(y) 2103 return true 2104 } 2105 // match: (ADD (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 2106 // cond: cc.(Op) == OpARM64LessThanU 2107 // result: (RORW x y) 2108 for { 2109 _ = v.Args[1] 2110 v_0 := v.Args[0] 2111 if v_0.Op != OpARM64CSEL0 { 2112 break 2113 } 2114 if v_0.Type != typ.UInt32 { 2115 break 2116 } 2117 cc := v_0.Aux 2118 _ = v_0.Args[1] 2119 v_0_0 := v_0.Args[0] 2120 if v_0_0.Op != OpARM64SLL { 2121 break 2122 } 2123 _ = v_0_0.Args[1] 2124 x := v_0_0.Args[0] 2125 v_0_0_1 := v_0_0.Args[1] 2126 if v_0_0_1.Op != OpARM64SUB { 2127 break 2128 } 2129 t := v_0_0_1.Type 2130 _ = v_0_0_1.Args[1] 2131 v_0_0_1_0 := v_0_0_1.Args[0] 2132 if v_0_0_1_0.Op != OpARM64MOVDconst { 2133 break 2134 } 2135 if v_0_0_1_0.AuxInt != 32 { 2136 break 2137 } 2138 v_0_0_1_1 := v_0_0_1.Args[1] 2139 if v_0_0_1_1.Op != OpARM64ANDconst { 2140 break 2141 } 2142 if v_0_0_1_1.Type != t { 2143 break 2144 } 2145 if v_0_0_1_1.AuxInt != 31 { 2146 break 2147 } 2148 y := v_0_0_1_1.Args[0] 
2149 v_0_1 := v_0.Args[1] 2150 if v_0_1.Op != OpARM64CMPconst { 2151 break 2152 } 2153 if v_0_1.AuxInt != 64 { 2154 break 2155 } 2156 v_0_1_0 := v_0_1.Args[0] 2157 if v_0_1_0.Op != OpARM64SUB { 2158 break 2159 } 2160 if v_0_1_0.Type != t { 2161 break 2162 } 2163 _ = v_0_1_0.Args[1] 2164 v_0_1_0_0 := v_0_1_0.Args[0] 2165 if v_0_1_0_0.Op != OpARM64MOVDconst { 2166 break 2167 } 2168 if v_0_1_0_0.AuxInt != 32 { 2169 break 2170 } 2171 v_0_1_0_1 := v_0_1_0.Args[1] 2172 if v_0_1_0_1.Op != OpARM64ANDconst { 2173 break 2174 } 2175 if v_0_1_0_1.Type != t { 2176 break 2177 } 2178 if v_0_1_0_1.AuxInt != 31 { 2179 break 2180 } 2181 if y != v_0_1_0_1.Args[0] { 2182 break 2183 } 2184 v_1 := v.Args[1] 2185 if v_1.Op != OpARM64SRL { 2186 break 2187 } 2188 if v_1.Type != typ.UInt32 { 2189 break 2190 } 2191 _ = v_1.Args[1] 2192 v_1_0 := v_1.Args[0] 2193 if v_1_0.Op != OpARM64MOVWUreg { 2194 break 2195 } 2196 if x != v_1_0.Args[0] { 2197 break 2198 } 2199 v_1_1 := v_1.Args[1] 2200 if v_1_1.Op != OpARM64ANDconst { 2201 break 2202 } 2203 if v_1_1.Type != t { 2204 break 2205 } 2206 if v_1_1.AuxInt != 31 { 2207 break 2208 } 2209 if y != v_1_1.Args[0] { 2210 break 2211 } 2212 if !(cc.(Op) == OpARM64LessThanU) { 2213 break 2214 } 2215 v.reset(OpARM64RORW) 2216 v.AddArg(x) 2217 v.AddArg(y) 2218 return true 2219 } 2220 return false 2221 } 2222 func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool { 2223 // match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr)) 2224 // cond: 2225 // result: (MOVDaddr [off1+off2] {sym} ptr) 2226 for { 2227 off1 := v.AuxInt 2228 v_0 := v.Args[0] 2229 if v_0.Op != OpARM64MOVDaddr { 2230 break 2231 } 2232 off2 := v_0.AuxInt 2233 sym := v_0.Aux 2234 ptr := v_0.Args[0] 2235 v.reset(OpARM64MOVDaddr) 2236 v.AuxInt = off1 + off2 2237 v.Aux = sym 2238 v.AddArg(ptr) 2239 return true 2240 } 2241 // match: (ADDconst [0] x) 2242 // cond: 2243 // result: x 2244 for { 2245 if v.AuxInt != 0 { 2246 break 2247 } 2248 x := v.Args[0] 2249 v.reset(OpCopy) 2250 v.Type = x.Type 2251 
v.AddArg(x) 2252 return true 2253 } 2254 // match: (ADDconst [c] (MOVDconst [d])) 2255 // cond: 2256 // result: (MOVDconst [c+d]) 2257 for { 2258 c := v.AuxInt 2259 v_0 := v.Args[0] 2260 if v_0.Op != OpARM64MOVDconst { 2261 break 2262 } 2263 d := v_0.AuxInt 2264 v.reset(OpARM64MOVDconst) 2265 v.AuxInt = c + d 2266 return true 2267 } 2268 // match: (ADDconst [c] (ADDconst [d] x)) 2269 // cond: 2270 // result: (ADDconst [c+d] x) 2271 for { 2272 c := v.AuxInt 2273 v_0 := v.Args[0] 2274 if v_0.Op != OpARM64ADDconst { 2275 break 2276 } 2277 d := v_0.AuxInt 2278 x := v_0.Args[0] 2279 v.reset(OpARM64ADDconst) 2280 v.AuxInt = c + d 2281 v.AddArg(x) 2282 return true 2283 } 2284 // match: (ADDconst [c] (SUBconst [d] x)) 2285 // cond: 2286 // result: (ADDconst [c-d] x) 2287 for { 2288 c := v.AuxInt 2289 v_0 := v.Args[0] 2290 if v_0.Op != OpARM64SUBconst { 2291 break 2292 } 2293 d := v_0.AuxInt 2294 x := v_0.Args[0] 2295 v.reset(OpARM64ADDconst) 2296 v.AuxInt = c - d 2297 v.AddArg(x) 2298 return true 2299 } 2300 return false 2301 } 2302 func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool { 2303 b := v.Block 2304 _ = b 2305 // match: (ADDshiftLL (MOVDconst [c]) x [d]) 2306 // cond: 2307 // result: (ADDconst [c] (SLLconst <x.Type> x [d])) 2308 for { 2309 d := v.AuxInt 2310 _ = v.Args[1] 2311 v_0 := v.Args[0] 2312 if v_0.Op != OpARM64MOVDconst { 2313 break 2314 } 2315 c := v_0.AuxInt 2316 x := v.Args[1] 2317 v.reset(OpARM64ADDconst) 2318 v.AuxInt = c 2319 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 2320 v0.AuxInt = d 2321 v0.AddArg(x) 2322 v.AddArg(v0) 2323 return true 2324 } 2325 // match: (ADDshiftLL x (MOVDconst [c]) [d]) 2326 // cond: 2327 // result: (ADDconst x [int64(uint64(c)<<uint64(d))]) 2328 for { 2329 d := v.AuxInt 2330 _ = v.Args[1] 2331 x := v.Args[0] 2332 v_1 := v.Args[1] 2333 if v_1.Op != OpARM64MOVDconst { 2334 break 2335 } 2336 c := v_1.AuxInt 2337 v.reset(OpARM64ADDconst) 2338 v.AuxInt = int64(uint64(c) << uint64(d)) 2339 v.AddArg(x) 2340 return true 
2341 } 2342 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x) 2343 // cond: 2344 // result: (RORconst [64-c] x) 2345 for { 2346 c := v.AuxInt 2347 _ = v.Args[1] 2348 v_0 := v.Args[0] 2349 if v_0.Op != OpARM64SRLconst { 2350 break 2351 } 2352 if v_0.AuxInt != 64-c { 2353 break 2354 } 2355 x := v_0.Args[0] 2356 if x != v.Args[1] { 2357 break 2358 } 2359 v.reset(OpARM64RORconst) 2360 v.AuxInt = 64 - c 2361 v.AddArg(x) 2362 return true 2363 } 2364 // match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x) 2365 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 2366 // result: (RORWconst [32-c] x) 2367 for { 2368 t := v.Type 2369 c := v.AuxInt 2370 _ = v.Args[1] 2371 v_0 := v.Args[0] 2372 if v_0.Op != OpARM64UBFX { 2373 break 2374 } 2375 bfc := v_0.AuxInt 2376 x := v_0.Args[0] 2377 if x != v.Args[1] { 2378 break 2379 } 2380 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 2381 break 2382 } 2383 v.reset(OpARM64RORWconst) 2384 v.AuxInt = 32 - c 2385 v.AddArg(x) 2386 return true 2387 } 2388 // match: (ADDshiftLL [c] (SRLconst x [64-c]) x2) 2389 // cond: 2390 // result: (EXTRconst [64-c] x2 x) 2391 for { 2392 c := v.AuxInt 2393 _ = v.Args[1] 2394 v_0 := v.Args[0] 2395 if v_0.Op != OpARM64SRLconst { 2396 break 2397 } 2398 if v_0.AuxInt != 64-c { 2399 break 2400 } 2401 x := v_0.Args[0] 2402 x2 := v.Args[1] 2403 v.reset(OpARM64EXTRconst) 2404 v.AuxInt = 64 - c 2405 v.AddArg(x2) 2406 v.AddArg(x) 2407 return true 2408 } 2409 // match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2) 2410 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 2411 // result: (EXTRWconst [32-c] x2 x) 2412 for { 2413 t := v.Type 2414 c := v.AuxInt 2415 _ = v.Args[1] 2416 v_0 := v.Args[0] 2417 if v_0.Op != OpARM64UBFX { 2418 break 2419 } 2420 bfc := v_0.AuxInt 2421 x := v_0.Args[0] 2422 x2 := v.Args[1] 2423 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 2424 break 2425 } 2426 v.reset(OpARM64EXTRWconst) 2427 v.AuxInt = 32 - c 2428 v.AddArg(x2) 2429 v.AddArg(x) 
2430 return true 2431 } 2432 return false 2433 } 2434 func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool { 2435 b := v.Block 2436 _ = b 2437 // match: (ADDshiftRA (MOVDconst [c]) x [d]) 2438 // cond: 2439 // result: (ADDconst [c] (SRAconst <x.Type> x [d])) 2440 for { 2441 d := v.AuxInt 2442 _ = v.Args[1] 2443 v_0 := v.Args[0] 2444 if v_0.Op != OpARM64MOVDconst { 2445 break 2446 } 2447 c := v_0.AuxInt 2448 x := v.Args[1] 2449 v.reset(OpARM64ADDconst) 2450 v.AuxInt = c 2451 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 2452 v0.AuxInt = d 2453 v0.AddArg(x) 2454 v.AddArg(v0) 2455 return true 2456 } 2457 // match: (ADDshiftRA x (MOVDconst [c]) [d]) 2458 // cond: 2459 // result: (ADDconst x [c>>uint64(d)]) 2460 for { 2461 d := v.AuxInt 2462 _ = v.Args[1] 2463 x := v.Args[0] 2464 v_1 := v.Args[1] 2465 if v_1.Op != OpARM64MOVDconst { 2466 break 2467 } 2468 c := v_1.AuxInt 2469 v.reset(OpARM64ADDconst) 2470 v.AuxInt = c >> uint64(d) 2471 v.AddArg(x) 2472 return true 2473 } 2474 return false 2475 } 2476 func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool { 2477 b := v.Block 2478 _ = b 2479 // match: (ADDshiftRL (MOVDconst [c]) x [d]) 2480 // cond: 2481 // result: (ADDconst [c] (SRLconst <x.Type> x [d])) 2482 for { 2483 d := v.AuxInt 2484 _ = v.Args[1] 2485 v_0 := v.Args[0] 2486 if v_0.Op != OpARM64MOVDconst { 2487 break 2488 } 2489 c := v_0.AuxInt 2490 x := v.Args[1] 2491 v.reset(OpARM64ADDconst) 2492 v.AuxInt = c 2493 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 2494 v0.AuxInt = d 2495 v0.AddArg(x) 2496 v.AddArg(v0) 2497 return true 2498 } 2499 // match: (ADDshiftRL x (MOVDconst [c]) [d]) 2500 // cond: 2501 // result: (ADDconst x [int64(uint64(c)>>uint64(d))]) 2502 for { 2503 d := v.AuxInt 2504 _ = v.Args[1] 2505 x := v.Args[0] 2506 v_1 := v.Args[1] 2507 if v_1.Op != OpARM64MOVDconst { 2508 break 2509 } 2510 c := v_1.AuxInt 2511 v.reset(OpARM64ADDconst) 2512 v.AuxInt = int64(uint64(c) >> uint64(d)) 2513 v.AddArg(x) 2514 return true 2515 } 2516 // 
match: (ADDshiftRL [c] (SLLconst x [64-c]) x) 2517 // cond: 2518 // result: (RORconst [ c] x) 2519 for { 2520 c := v.AuxInt 2521 _ = v.Args[1] 2522 v_0 := v.Args[0] 2523 if v_0.Op != OpARM64SLLconst { 2524 break 2525 } 2526 if v_0.AuxInt != 64-c { 2527 break 2528 } 2529 x := v_0.Args[0] 2530 if x != v.Args[1] { 2531 break 2532 } 2533 v.reset(OpARM64RORconst) 2534 v.AuxInt = c 2535 v.AddArg(x) 2536 return true 2537 } 2538 // match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 2539 // cond: c < 32 && t.Size() == 4 2540 // result: (RORWconst [c] x) 2541 for { 2542 t := v.Type 2543 c := v.AuxInt 2544 _ = v.Args[1] 2545 v_0 := v.Args[0] 2546 if v_0.Op != OpARM64SLLconst { 2547 break 2548 } 2549 if v_0.AuxInt != 32-c { 2550 break 2551 } 2552 x := v_0.Args[0] 2553 v_1 := v.Args[1] 2554 if v_1.Op != OpARM64MOVWUreg { 2555 break 2556 } 2557 if x != v_1.Args[0] { 2558 break 2559 } 2560 if !(c < 32 && t.Size() == 4) { 2561 break 2562 } 2563 v.reset(OpARM64RORWconst) 2564 v.AuxInt = c 2565 v.AddArg(x) 2566 return true 2567 } 2568 return false 2569 } 2570 func rewriteValueARM64_OpARM64AND_0(v *Value) bool { 2571 // match: (AND x (MOVDconst [c])) 2572 // cond: 2573 // result: (ANDconst [c] x) 2574 for { 2575 _ = v.Args[1] 2576 x := v.Args[0] 2577 v_1 := v.Args[1] 2578 if v_1.Op != OpARM64MOVDconst { 2579 break 2580 } 2581 c := v_1.AuxInt 2582 v.reset(OpARM64ANDconst) 2583 v.AuxInt = c 2584 v.AddArg(x) 2585 return true 2586 } 2587 // match: (AND (MOVDconst [c]) x) 2588 // cond: 2589 // result: (ANDconst [c] x) 2590 for { 2591 _ = v.Args[1] 2592 v_0 := v.Args[0] 2593 if v_0.Op != OpARM64MOVDconst { 2594 break 2595 } 2596 c := v_0.AuxInt 2597 x := v.Args[1] 2598 v.reset(OpARM64ANDconst) 2599 v.AuxInt = c 2600 v.AddArg(x) 2601 return true 2602 } 2603 // match: (AND x x) 2604 // cond: 2605 // result: x 2606 for { 2607 _ = v.Args[1] 2608 x := v.Args[0] 2609 if x != v.Args[1] { 2610 break 2611 } 2612 v.reset(OpCopy) 2613 v.Type = x.Type 2614 v.AddArg(x) 2615 return true 2616 } 
2617 // match: (AND x (MVN y)) 2618 // cond: 2619 // result: (BIC x y) 2620 for { 2621 _ = v.Args[1] 2622 x := v.Args[0] 2623 v_1 := v.Args[1] 2624 if v_1.Op != OpARM64MVN { 2625 break 2626 } 2627 y := v_1.Args[0] 2628 v.reset(OpARM64BIC) 2629 v.AddArg(x) 2630 v.AddArg(y) 2631 return true 2632 } 2633 // match: (AND (MVN y) x) 2634 // cond: 2635 // result: (BIC x y) 2636 for { 2637 _ = v.Args[1] 2638 v_0 := v.Args[0] 2639 if v_0.Op != OpARM64MVN { 2640 break 2641 } 2642 y := v_0.Args[0] 2643 x := v.Args[1] 2644 v.reset(OpARM64BIC) 2645 v.AddArg(x) 2646 v.AddArg(y) 2647 return true 2648 } 2649 // match: (AND x0 x1:(SLLconst [c] y)) 2650 // cond: clobberIfDead(x1) 2651 // result: (ANDshiftLL x0 y [c]) 2652 for { 2653 _ = v.Args[1] 2654 x0 := v.Args[0] 2655 x1 := v.Args[1] 2656 if x1.Op != OpARM64SLLconst { 2657 break 2658 } 2659 c := x1.AuxInt 2660 y := x1.Args[0] 2661 if !(clobberIfDead(x1)) { 2662 break 2663 } 2664 v.reset(OpARM64ANDshiftLL) 2665 v.AuxInt = c 2666 v.AddArg(x0) 2667 v.AddArg(y) 2668 return true 2669 } 2670 // match: (AND x1:(SLLconst [c] y) x0) 2671 // cond: clobberIfDead(x1) 2672 // result: (ANDshiftLL x0 y [c]) 2673 for { 2674 _ = v.Args[1] 2675 x1 := v.Args[0] 2676 if x1.Op != OpARM64SLLconst { 2677 break 2678 } 2679 c := x1.AuxInt 2680 y := x1.Args[0] 2681 x0 := v.Args[1] 2682 if !(clobberIfDead(x1)) { 2683 break 2684 } 2685 v.reset(OpARM64ANDshiftLL) 2686 v.AuxInt = c 2687 v.AddArg(x0) 2688 v.AddArg(y) 2689 return true 2690 } 2691 // match: (AND x0 x1:(SRLconst [c] y)) 2692 // cond: clobberIfDead(x1) 2693 // result: (ANDshiftRL x0 y [c]) 2694 for { 2695 _ = v.Args[1] 2696 x0 := v.Args[0] 2697 x1 := v.Args[1] 2698 if x1.Op != OpARM64SRLconst { 2699 break 2700 } 2701 c := x1.AuxInt 2702 y := x1.Args[0] 2703 if !(clobberIfDead(x1)) { 2704 break 2705 } 2706 v.reset(OpARM64ANDshiftRL) 2707 v.AuxInt = c 2708 v.AddArg(x0) 2709 v.AddArg(y) 2710 return true 2711 } 2712 // match: (AND x1:(SRLconst [c] y) x0) 2713 // cond: clobberIfDead(x1) 2714 // result: 
// rewriteValueARM64_OpARM64AND_10 holds the overflow rules for AND that did
// not fit in rewriteValueARM64_OpARM64AND_0 (the generator splits rule sets
// into groups of ten). It reports whether v was rewritten in place.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64AND_10(v *Value) bool {
	// match: (AND x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		_ = v.Args[1] // panics early if v has fewer than two operands
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		// only fold the shift into the AND when the shift value has no
		// other uses (clobberIfDead condition from the rules file)
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
OpARM64MOVDconst { 2812 break 2813 } 2814 d := v_0.AuxInt 2815 v.reset(OpARM64MOVDconst) 2816 v.AuxInt = c & d 2817 return true 2818 } 2819 // match: (ANDconst [c] (ANDconst [d] x)) 2820 // cond: 2821 // result: (ANDconst [c&d] x) 2822 for { 2823 c := v.AuxInt 2824 v_0 := v.Args[0] 2825 if v_0.Op != OpARM64ANDconst { 2826 break 2827 } 2828 d := v_0.AuxInt 2829 x := v_0.Args[0] 2830 v.reset(OpARM64ANDconst) 2831 v.AuxInt = c & d 2832 v.AddArg(x) 2833 return true 2834 } 2835 // match: (ANDconst [c] (MOVWUreg x)) 2836 // cond: 2837 // result: (ANDconst [c&(1<<32-1)] x) 2838 for { 2839 c := v.AuxInt 2840 v_0 := v.Args[0] 2841 if v_0.Op != OpARM64MOVWUreg { 2842 break 2843 } 2844 x := v_0.Args[0] 2845 v.reset(OpARM64ANDconst) 2846 v.AuxInt = c & (1<<32 - 1) 2847 v.AddArg(x) 2848 return true 2849 } 2850 // match: (ANDconst [c] (MOVHUreg x)) 2851 // cond: 2852 // result: (ANDconst [c&(1<<16-1)] x) 2853 for { 2854 c := v.AuxInt 2855 v_0 := v.Args[0] 2856 if v_0.Op != OpARM64MOVHUreg { 2857 break 2858 } 2859 x := v_0.Args[0] 2860 v.reset(OpARM64ANDconst) 2861 v.AuxInt = c & (1<<16 - 1) 2862 v.AddArg(x) 2863 return true 2864 } 2865 // match: (ANDconst [c] (MOVBUreg x)) 2866 // cond: 2867 // result: (ANDconst [c&(1<<8-1)] x) 2868 for { 2869 c := v.AuxInt 2870 v_0 := v.Args[0] 2871 if v_0.Op != OpARM64MOVBUreg { 2872 break 2873 } 2874 x := v_0.Args[0] 2875 v.reset(OpARM64ANDconst) 2876 v.AuxInt = c & (1<<8 - 1) 2877 v.AddArg(x) 2878 return true 2879 } 2880 // match: (ANDconst [ac] (SLLconst [sc] x)) 2881 // cond: isARM64BFMask(sc, ac, sc) 2882 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(ac, sc))] x) 2883 for { 2884 ac := v.AuxInt 2885 v_0 := v.Args[0] 2886 if v_0.Op != OpARM64SLLconst { 2887 break 2888 } 2889 sc := v_0.AuxInt 2890 x := v_0.Args[0] 2891 if !(isARM64BFMask(sc, ac, sc)) { 2892 break 2893 } 2894 v.reset(OpARM64UBFIZ) 2895 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, sc)) 2896 v.AddArg(x) 2897 return true 2898 } 2899 // match: (ANDconst [ac] (SRLconst [sc] 
// rewriteValueARM64_OpARM64ANDshiftLL_0 tries each generated rewrite rule for
// ANDshiftLL in order and reports whether v was rewritten in place.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1] // panics early if v has fewer than two operands
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// shift done in uint64 to get logical (zero-fill) semantics
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// (x<<d) & (x<<d) == x<<d: replace with a copy of y
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
(MOVDconst [c]) x [d]) 2989 // cond: 2990 // result: (ANDconst [c] (SRAconst <x.Type> x [d])) 2991 for { 2992 d := v.AuxInt 2993 _ = v.Args[1] 2994 v_0 := v.Args[0] 2995 if v_0.Op != OpARM64MOVDconst { 2996 break 2997 } 2998 c := v_0.AuxInt 2999 x := v.Args[1] 3000 v.reset(OpARM64ANDconst) 3001 v.AuxInt = c 3002 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 3003 v0.AuxInt = d 3004 v0.AddArg(x) 3005 v.AddArg(v0) 3006 return true 3007 } 3008 // match: (ANDshiftRA x (MOVDconst [c]) [d]) 3009 // cond: 3010 // result: (ANDconst x [c>>uint64(d)]) 3011 for { 3012 d := v.AuxInt 3013 _ = v.Args[1] 3014 x := v.Args[0] 3015 v_1 := v.Args[1] 3016 if v_1.Op != OpARM64MOVDconst { 3017 break 3018 } 3019 c := v_1.AuxInt 3020 v.reset(OpARM64ANDconst) 3021 v.AuxInt = c >> uint64(d) 3022 v.AddArg(x) 3023 return true 3024 } 3025 // match: (ANDshiftRA x y:(SRAconst x [c]) [d]) 3026 // cond: c==d 3027 // result: y 3028 for { 3029 d := v.AuxInt 3030 _ = v.Args[1] 3031 x := v.Args[0] 3032 y := v.Args[1] 3033 if y.Op != OpARM64SRAconst { 3034 break 3035 } 3036 c := y.AuxInt 3037 if x != y.Args[0] { 3038 break 3039 } 3040 if !(c == d) { 3041 break 3042 } 3043 v.reset(OpCopy) 3044 v.Type = y.Type 3045 v.AddArg(y) 3046 return true 3047 } 3048 return false 3049 } 3050 func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool { 3051 b := v.Block 3052 _ = b 3053 // match: (ANDshiftRL (MOVDconst [c]) x [d]) 3054 // cond: 3055 // result: (ANDconst [c] (SRLconst <x.Type> x [d])) 3056 for { 3057 d := v.AuxInt 3058 _ = v.Args[1] 3059 v_0 := v.Args[0] 3060 if v_0.Op != OpARM64MOVDconst { 3061 break 3062 } 3063 c := v_0.AuxInt 3064 x := v.Args[1] 3065 v.reset(OpARM64ANDconst) 3066 v.AuxInt = c 3067 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 3068 v0.AuxInt = d 3069 v0.AddArg(x) 3070 v.AddArg(v0) 3071 return true 3072 } 3073 // match: (ANDshiftRL x (MOVDconst [c]) [d]) 3074 // cond: 3075 // result: (ANDconst x [int64(uint64(c)>>uint64(d))]) 3076 for { 3077 d := v.AuxInt 3078 _ = v.Args[1] 3079 
x := v.Args[0] 3080 v_1 := v.Args[1] 3081 if v_1.Op != OpARM64MOVDconst { 3082 break 3083 } 3084 c := v_1.AuxInt 3085 v.reset(OpARM64ANDconst) 3086 v.AuxInt = int64(uint64(c) >> uint64(d)) 3087 v.AddArg(x) 3088 return true 3089 } 3090 // match: (ANDshiftRL x y:(SRLconst x [c]) [d]) 3091 // cond: c==d 3092 // result: y 3093 for { 3094 d := v.AuxInt 3095 _ = v.Args[1] 3096 x := v.Args[0] 3097 y := v.Args[1] 3098 if y.Op != OpARM64SRLconst { 3099 break 3100 } 3101 c := y.AuxInt 3102 if x != y.Args[0] { 3103 break 3104 } 3105 if !(c == d) { 3106 break 3107 } 3108 v.reset(OpCopy) 3109 v.Type = y.Type 3110 v.AddArg(y) 3111 return true 3112 } 3113 return false 3114 } 3115 func rewriteValueARM64_OpARM64BIC_0(v *Value) bool { 3116 // match: (BIC x (MOVDconst [c])) 3117 // cond: 3118 // result: (ANDconst [^c] x) 3119 for { 3120 _ = v.Args[1] 3121 x := v.Args[0] 3122 v_1 := v.Args[1] 3123 if v_1.Op != OpARM64MOVDconst { 3124 break 3125 } 3126 c := v_1.AuxInt 3127 v.reset(OpARM64ANDconst) 3128 v.AuxInt = ^c 3129 v.AddArg(x) 3130 return true 3131 } 3132 // match: (BIC x x) 3133 // cond: 3134 // result: (MOVDconst [0]) 3135 for { 3136 _ = v.Args[1] 3137 x := v.Args[0] 3138 if x != v.Args[1] { 3139 break 3140 } 3141 v.reset(OpARM64MOVDconst) 3142 v.AuxInt = 0 3143 return true 3144 } 3145 // match: (BIC x0 x1:(SLLconst [c] y)) 3146 // cond: clobberIfDead(x1) 3147 // result: (BICshiftLL x0 y [c]) 3148 for { 3149 _ = v.Args[1] 3150 x0 := v.Args[0] 3151 x1 := v.Args[1] 3152 if x1.Op != OpARM64SLLconst { 3153 break 3154 } 3155 c := x1.AuxInt 3156 y := x1.Args[0] 3157 if !(clobberIfDead(x1)) { 3158 break 3159 } 3160 v.reset(OpARM64BICshiftLL) 3161 v.AuxInt = c 3162 v.AddArg(x0) 3163 v.AddArg(y) 3164 return true 3165 } 3166 // match: (BIC x0 x1:(SRLconst [c] y)) 3167 // cond: clobberIfDead(x1) 3168 // result: (BICshiftRL x0 y [c]) 3169 for { 3170 _ = v.Args[1] 3171 x0 := v.Args[0] 3172 x1 := v.Args[1] 3173 if x1.Op != OpARM64SRLconst { 3174 break 3175 } 3176 c := x1.AuxInt 3177 y := 
// rewriteValueARM64_OpARM64BICshiftLL_0 tries each generated rewrite rule for
// BICshiftLL (bit-clear with left-shifted second operand) and reports whether
// v was rewritten in place. Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool {
	// match: (BICshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1] // panics early if v has fewer than two operands
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// BIC x k == AND x ^k: fold the shifted constant and complement it
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// clearing a value's own bits yields zero
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BICshiftRL_0 tries each generated rewrite rule for
// BICshiftRL (bit-clear with logically right-shifted second operand) and
// reports whether v was rewritten in place.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool {
	// match: (BICshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1] // panics early if v has fewer than two operands
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		// BIC x k == AND x ^k; uint64 cast gives the logical shift
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (BICshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// clearing a value's own bits yields zero
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
v.reset(OpARM64CMNconst) 3365 v.AuxInt = c 3366 v.AddArg(x) 3367 return true 3368 } 3369 // match: (CMN x0 x1:(SLLconst [c] y)) 3370 // cond: clobberIfDead(x1) 3371 // result: (CMNshiftLL x0 y [c]) 3372 for { 3373 _ = v.Args[1] 3374 x0 := v.Args[0] 3375 x1 := v.Args[1] 3376 if x1.Op != OpARM64SLLconst { 3377 break 3378 } 3379 c := x1.AuxInt 3380 y := x1.Args[0] 3381 if !(clobberIfDead(x1)) { 3382 break 3383 } 3384 v.reset(OpARM64CMNshiftLL) 3385 v.AuxInt = c 3386 v.AddArg(x0) 3387 v.AddArg(y) 3388 return true 3389 } 3390 // match: (CMN x1:(SLLconst [c] y) x0) 3391 // cond: clobberIfDead(x1) 3392 // result: (CMNshiftLL x0 y [c]) 3393 for { 3394 _ = v.Args[1] 3395 x1 := v.Args[0] 3396 if x1.Op != OpARM64SLLconst { 3397 break 3398 } 3399 c := x1.AuxInt 3400 y := x1.Args[0] 3401 x0 := v.Args[1] 3402 if !(clobberIfDead(x1)) { 3403 break 3404 } 3405 v.reset(OpARM64CMNshiftLL) 3406 v.AuxInt = c 3407 v.AddArg(x0) 3408 v.AddArg(y) 3409 return true 3410 } 3411 // match: (CMN x0 x1:(SRLconst [c] y)) 3412 // cond: clobberIfDead(x1) 3413 // result: (CMNshiftRL x0 y [c]) 3414 for { 3415 _ = v.Args[1] 3416 x0 := v.Args[0] 3417 x1 := v.Args[1] 3418 if x1.Op != OpARM64SRLconst { 3419 break 3420 } 3421 c := x1.AuxInt 3422 y := x1.Args[0] 3423 if !(clobberIfDead(x1)) { 3424 break 3425 } 3426 v.reset(OpARM64CMNshiftRL) 3427 v.AuxInt = c 3428 v.AddArg(x0) 3429 v.AddArg(y) 3430 return true 3431 } 3432 // match: (CMN x1:(SRLconst [c] y) x0) 3433 // cond: clobberIfDead(x1) 3434 // result: (CMNshiftRL x0 y [c]) 3435 for { 3436 _ = v.Args[1] 3437 x1 := v.Args[0] 3438 if x1.Op != OpARM64SRLconst { 3439 break 3440 } 3441 c := x1.AuxInt 3442 y := x1.Args[0] 3443 x0 := v.Args[1] 3444 if !(clobberIfDead(x1)) { 3445 break 3446 } 3447 v.reset(OpARM64CMNshiftRL) 3448 v.AuxInt = c 3449 v.AddArg(x0) 3450 v.AddArg(y) 3451 return true 3452 } 3453 // match: (CMN x0 x1:(SRAconst [c] y)) 3454 // cond: clobberIfDead(x1) 3455 // result: (CMNshiftRA x0 y [c]) 3456 for { 3457 _ = v.Args[1] 3458 x0 := 
// rewriteValueARM64_OpARM64CMNW_0 tries each generated rewrite rule for CMNW
// (32-bit compare-negative) and reports whether v was rewritten in place.
// CMN is commutative, so both operand orders fold the constant the same way.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64CMNW_0(v *Value) bool {
	// match: (CMNW x (MOVDconst [c]))
	// cond:
	// result: (CMNWconst [c] x)
	for {
		_ = v.Args[1] // panics early if v has fewer than two operands
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMNW (MOVDconst [c]) x)
	// cond:
	// result: (CMNWconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64CMNWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
result: (FlagLT_ULT) 3552 for { 3553 y := v.AuxInt 3554 v_0 := v.Args[0] 3555 if v_0.Op != OpARM64MOVDconst { 3556 break 3557 } 3558 x := v_0.AuxInt 3559 if !(int32(x) < int32(-y) && uint32(x) < uint32(-y)) { 3560 break 3561 } 3562 v.reset(OpARM64FlagLT_ULT) 3563 return true 3564 } 3565 // match: (CMNWconst (MOVDconst [x]) [y]) 3566 // cond: int32(x)<int32(-y) && uint32(x)>uint32(-y) 3567 // result: (FlagLT_UGT) 3568 for { 3569 y := v.AuxInt 3570 v_0 := v.Args[0] 3571 if v_0.Op != OpARM64MOVDconst { 3572 break 3573 } 3574 x := v_0.AuxInt 3575 if !(int32(x) < int32(-y) && uint32(x) > uint32(-y)) { 3576 break 3577 } 3578 v.reset(OpARM64FlagLT_UGT) 3579 return true 3580 } 3581 // match: (CMNWconst (MOVDconst [x]) [y]) 3582 // cond: int32(x)>int32(-y) && uint32(x)<uint32(-y) 3583 // result: (FlagGT_ULT) 3584 for { 3585 y := v.AuxInt 3586 v_0 := v.Args[0] 3587 if v_0.Op != OpARM64MOVDconst { 3588 break 3589 } 3590 x := v_0.AuxInt 3591 if !(int32(x) > int32(-y) && uint32(x) < uint32(-y)) { 3592 break 3593 } 3594 v.reset(OpARM64FlagGT_ULT) 3595 return true 3596 } 3597 // match: (CMNWconst (MOVDconst [x]) [y]) 3598 // cond: int32(x)>int32(-y) && uint32(x)>uint32(-y) 3599 // result: (FlagGT_UGT) 3600 for { 3601 y := v.AuxInt 3602 v_0 := v.Args[0] 3603 if v_0.Op != OpARM64MOVDconst { 3604 break 3605 } 3606 x := v_0.AuxInt 3607 if !(int32(x) > int32(-y) && uint32(x) > uint32(-y)) { 3608 break 3609 } 3610 v.reset(OpARM64FlagGT_UGT) 3611 return true 3612 } 3613 return false 3614 } 3615 func rewriteValueARM64_OpARM64CMNconst_0(v *Value) bool { 3616 // match: (CMNconst (MOVDconst [x]) [y]) 3617 // cond: int64(x)==int64(-y) 3618 // result: (FlagEQ) 3619 for { 3620 y := v.AuxInt 3621 v_0 := v.Args[0] 3622 if v_0.Op != OpARM64MOVDconst { 3623 break 3624 } 3625 x := v_0.AuxInt 3626 if !(int64(x) == int64(-y)) { 3627 break 3628 } 3629 v.reset(OpARM64FlagEQ) 3630 return true 3631 } 3632 // match: (CMNconst (MOVDconst [x]) [y]) 3633 // cond: int64(x)<int64(-y) && uint64(x)<uint64(-y) 
3634 // result: (FlagLT_ULT) 3635 for { 3636 y := v.AuxInt 3637 v_0 := v.Args[0] 3638 if v_0.Op != OpARM64MOVDconst { 3639 break 3640 } 3641 x := v_0.AuxInt 3642 if !(int64(x) < int64(-y) && uint64(x) < uint64(-y)) { 3643 break 3644 } 3645 v.reset(OpARM64FlagLT_ULT) 3646 return true 3647 } 3648 // match: (CMNconst (MOVDconst [x]) [y]) 3649 // cond: int64(x)<int64(-y) && uint64(x)>uint64(-y) 3650 // result: (FlagLT_UGT) 3651 for { 3652 y := v.AuxInt 3653 v_0 := v.Args[0] 3654 if v_0.Op != OpARM64MOVDconst { 3655 break 3656 } 3657 x := v_0.AuxInt 3658 if !(int64(x) < int64(-y) && uint64(x) > uint64(-y)) { 3659 break 3660 } 3661 v.reset(OpARM64FlagLT_UGT) 3662 return true 3663 } 3664 // match: (CMNconst (MOVDconst [x]) [y]) 3665 // cond: int64(x)>int64(-y) && uint64(x)<uint64(-y) 3666 // result: (FlagGT_ULT) 3667 for { 3668 y := v.AuxInt 3669 v_0 := v.Args[0] 3670 if v_0.Op != OpARM64MOVDconst { 3671 break 3672 } 3673 x := v_0.AuxInt 3674 if !(int64(x) > int64(-y) && uint64(x) < uint64(-y)) { 3675 break 3676 } 3677 v.reset(OpARM64FlagGT_ULT) 3678 return true 3679 } 3680 // match: (CMNconst (MOVDconst [x]) [y]) 3681 // cond: int64(x)>int64(-y) && uint64(x)>uint64(-y) 3682 // result: (FlagGT_UGT) 3683 for { 3684 y := v.AuxInt 3685 v_0 := v.Args[0] 3686 if v_0.Op != OpARM64MOVDconst { 3687 break 3688 } 3689 x := v_0.AuxInt 3690 if !(int64(x) > int64(-y) && uint64(x) > uint64(-y)) { 3691 break 3692 } 3693 v.reset(OpARM64FlagGT_UGT) 3694 return true 3695 } 3696 return false 3697 } 3698 func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool { 3699 b := v.Block 3700 _ = b 3701 // match: (CMNshiftLL (MOVDconst [c]) x [d]) 3702 // cond: 3703 // result: (CMNconst [c] (SLLconst <x.Type> x [d])) 3704 for { 3705 d := v.AuxInt 3706 _ = v.Args[1] 3707 v_0 := v.Args[0] 3708 if v_0.Op != OpARM64MOVDconst { 3709 break 3710 } 3711 c := v_0.AuxInt 3712 x := v.Args[1] 3713 v.reset(OpARM64CMNconst) 3714 v.AuxInt = c 3715 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 3716 v0.AuxInt = 
// rewriteValueARM64_OpARM64CMNshiftRA_0 tries each generated rewrite rule for
// CMNshiftRA (compare-negative with arithmetically right-shifted second
// operand) and reports whether v was rewritten in place.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMNshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1] // panics early if v has fewer than two operands
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMNshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (CMNconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		// arithmetic (sign-preserving) shift of the constant operand
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
3803 return true 3804 } 3805 // match: (CMNshiftRL x (MOVDconst [c]) [d]) 3806 // cond: 3807 // result: (CMNconst x [int64(uint64(c)>>uint64(d))]) 3808 for { 3809 d := v.AuxInt 3810 _ = v.Args[1] 3811 x := v.Args[0] 3812 v_1 := v.Args[1] 3813 if v_1.Op != OpARM64MOVDconst { 3814 break 3815 } 3816 c := v_1.AuxInt 3817 v.reset(OpARM64CMNconst) 3818 v.AuxInt = int64(uint64(c) >> uint64(d)) 3819 v.AddArg(x) 3820 return true 3821 } 3822 return false 3823 } 3824 func rewriteValueARM64_OpARM64CMP_0(v *Value) bool { 3825 b := v.Block 3826 _ = b 3827 // match: (CMP x (MOVDconst [c])) 3828 // cond: 3829 // result: (CMPconst [c] x) 3830 for { 3831 _ = v.Args[1] 3832 x := v.Args[0] 3833 v_1 := v.Args[1] 3834 if v_1.Op != OpARM64MOVDconst { 3835 break 3836 } 3837 c := v_1.AuxInt 3838 v.reset(OpARM64CMPconst) 3839 v.AuxInt = c 3840 v.AddArg(x) 3841 return true 3842 } 3843 // match: (CMP (MOVDconst [c]) x) 3844 // cond: 3845 // result: (InvertFlags (CMPconst [c] x)) 3846 for { 3847 _ = v.Args[1] 3848 v_0 := v.Args[0] 3849 if v_0.Op != OpARM64MOVDconst { 3850 break 3851 } 3852 c := v_0.AuxInt 3853 x := v.Args[1] 3854 v.reset(OpARM64InvertFlags) 3855 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 3856 v0.AuxInt = c 3857 v0.AddArg(x) 3858 v.AddArg(v0) 3859 return true 3860 } 3861 // match: (CMP x0 x1:(SLLconst [c] y)) 3862 // cond: clobberIfDead(x1) 3863 // result: (CMPshiftLL x0 y [c]) 3864 for { 3865 _ = v.Args[1] 3866 x0 := v.Args[0] 3867 x1 := v.Args[1] 3868 if x1.Op != OpARM64SLLconst { 3869 break 3870 } 3871 c := x1.AuxInt 3872 y := x1.Args[0] 3873 if !(clobberIfDead(x1)) { 3874 break 3875 } 3876 v.reset(OpARM64CMPshiftLL) 3877 v.AuxInt = c 3878 v.AddArg(x0) 3879 v.AddArg(y) 3880 return true 3881 } 3882 // match: (CMP x0:(SLLconst [c] y) x1) 3883 // cond: clobberIfDead(x0) 3884 // result: (InvertFlags (CMPshiftLL x1 y [c])) 3885 for { 3886 _ = v.Args[1] 3887 x0 := v.Args[0] 3888 if x0.Op != OpARM64SLLconst { 3889 break 3890 } 3891 c := x0.AuxInt 3892 y := 
x0.Args[0] 3893 x1 := v.Args[1] 3894 if !(clobberIfDead(x0)) { 3895 break 3896 } 3897 v.reset(OpARM64InvertFlags) 3898 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags) 3899 v0.AuxInt = c 3900 v0.AddArg(x1) 3901 v0.AddArg(y) 3902 v.AddArg(v0) 3903 return true 3904 } 3905 // match: (CMP x0 x1:(SRLconst [c] y)) 3906 // cond: clobberIfDead(x1) 3907 // result: (CMPshiftRL x0 y [c]) 3908 for { 3909 _ = v.Args[1] 3910 x0 := v.Args[0] 3911 x1 := v.Args[1] 3912 if x1.Op != OpARM64SRLconst { 3913 break 3914 } 3915 c := x1.AuxInt 3916 y := x1.Args[0] 3917 if !(clobberIfDead(x1)) { 3918 break 3919 } 3920 v.reset(OpARM64CMPshiftRL) 3921 v.AuxInt = c 3922 v.AddArg(x0) 3923 v.AddArg(y) 3924 return true 3925 } 3926 // match: (CMP x0:(SRLconst [c] y) x1) 3927 // cond: clobberIfDead(x0) 3928 // result: (InvertFlags (CMPshiftRL x1 y [c])) 3929 for { 3930 _ = v.Args[1] 3931 x0 := v.Args[0] 3932 if x0.Op != OpARM64SRLconst { 3933 break 3934 } 3935 c := x0.AuxInt 3936 y := x0.Args[0] 3937 x1 := v.Args[1] 3938 if !(clobberIfDead(x0)) { 3939 break 3940 } 3941 v.reset(OpARM64InvertFlags) 3942 v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags) 3943 v0.AuxInt = c 3944 v0.AddArg(x1) 3945 v0.AddArg(y) 3946 v.AddArg(v0) 3947 return true 3948 } 3949 // match: (CMP x0 x1:(SRAconst [c] y)) 3950 // cond: clobberIfDead(x1) 3951 // result: (CMPshiftRA x0 y [c]) 3952 for { 3953 _ = v.Args[1] 3954 x0 := v.Args[0] 3955 x1 := v.Args[1] 3956 if x1.Op != OpARM64SRAconst { 3957 break 3958 } 3959 c := x1.AuxInt 3960 y := x1.Args[0] 3961 if !(clobberIfDead(x1)) { 3962 break 3963 } 3964 v.reset(OpARM64CMPshiftRA) 3965 v.AuxInt = c 3966 v.AddArg(x0) 3967 v.AddArg(y) 3968 return true 3969 } 3970 // match: (CMP x0:(SRAconst [c] y) x1) 3971 // cond: clobberIfDead(x0) 3972 // result: (InvertFlags (CMPshiftRA x1 y [c])) 3973 for { 3974 _ = v.Args[1] 3975 x0 := v.Args[0] 3976 if x0.Op != OpARM64SRAconst { 3977 break 3978 } 3979 c := x0.AuxInt 3980 y := x0.Args[0] 3981 x1 := v.Args[1] 3982 if 
// rewriteValueARM64_OpARM64CMPW_0 tries each generated rewrite rule for CMPW
// (32-bit compare) and reports whether v was rewritten in place.
// Unlike CMN, CMP is not commutative: folding a constant first operand swaps
// the comparison, so the result is wrapped in InvertFlags to compensate.
// Generated from gen/ARM64.rules; do not hand-edit.
func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPW x (MOVDconst [c]))
	// cond:
	// result: (CMPWconst [int64(int32(c))] x)
	for {
		_ = v.Args[1] // panics early if v has fewer than two operands
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPWconst)
		// truncate to the low 32 bits, then sign-extend for the AuxInt
		v.AuxInt = int64(int32(c))
		v.AddArg(x)
		return true
	}
	// match: (CMPW (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPWconst [int64(int32(c))] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = int64(int32(c))
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
(CMPWconst (MOVDconst [x]) [y]) 4068 // cond: int32(x)<int32(y) && uint32(x)>uint32(y) 4069 // result: (FlagLT_UGT) 4070 for { 4071 y := v.AuxInt 4072 v_0 := v.Args[0] 4073 if v_0.Op != OpARM64MOVDconst { 4074 break 4075 } 4076 x := v_0.AuxInt 4077 if !(int32(x) < int32(y) && uint32(x) > uint32(y)) { 4078 break 4079 } 4080 v.reset(OpARM64FlagLT_UGT) 4081 return true 4082 } 4083 // match: (CMPWconst (MOVDconst [x]) [y]) 4084 // cond: int32(x)>int32(y) && uint32(x)<uint32(y) 4085 // result: (FlagGT_ULT) 4086 for { 4087 y := v.AuxInt 4088 v_0 := v.Args[0] 4089 if v_0.Op != OpARM64MOVDconst { 4090 break 4091 } 4092 x := v_0.AuxInt 4093 if !(int32(x) > int32(y) && uint32(x) < uint32(y)) { 4094 break 4095 } 4096 v.reset(OpARM64FlagGT_ULT) 4097 return true 4098 } 4099 // match: (CMPWconst (MOVDconst [x]) [y]) 4100 // cond: int32(x)>int32(y) && uint32(x)>uint32(y) 4101 // result: (FlagGT_UGT) 4102 for { 4103 y := v.AuxInt 4104 v_0 := v.Args[0] 4105 if v_0.Op != OpARM64MOVDconst { 4106 break 4107 } 4108 x := v_0.AuxInt 4109 if !(int32(x) > int32(y) && uint32(x) > uint32(y)) { 4110 break 4111 } 4112 v.reset(OpARM64FlagGT_UGT) 4113 return true 4114 } 4115 // match: (CMPWconst (MOVBUreg _) [c]) 4116 // cond: 0xff < int32(c) 4117 // result: (FlagLT_ULT) 4118 for { 4119 c := v.AuxInt 4120 v_0 := v.Args[0] 4121 if v_0.Op != OpARM64MOVBUreg { 4122 break 4123 } 4124 if !(0xff < int32(c)) { 4125 break 4126 } 4127 v.reset(OpARM64FlagLT_ULT) 4128 return true 4129 } 4130 // match: (CMPWconst (MOVHUreg _) [c]) 4131 // cond: 0xffff < int32(c) 4132 // result: (FlagLT_ULT) 4133 for { 4134 c := v.AuxInt 4135 v_0 := v.Args[0] 4136 if v_0.Op != OpARM64MOVHUreg { 4137 break 4138 } 4139 if !(0xffff < int32(c)) { 4140 break 4141 } 4142 v.reset(OpARM64FlagLT_ULT) 4143 return true 4144 } 4145 return false 4146 } 4147 func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool { 4148 // match: (CMPconst (MOVDconst [x]) [y]) 4149 // cond: x==y 4150 // result: (FlagEQ) 4151 for { 4152 y := v.AuxInt 4153 
v_0 := v.Args[0] 4154 if v_0.Op != OpARM64MOVDconst { 4155 break 4156 } 4157 x := v_0.AuxInt 4158 if !(x == y) { 4159 break 4160 } 4161 v.reset(OpARM64FlagEQ) 4162 return true 4163 } 4164 // match: (CMPconst (MOVDconst [x]) [y]) 4165 // cond: x<y && uint64(x)<uint64(y) 4166 // result: (FlagLT_ULT) 4167 for { 4168 y := v.AuxInt 4169 v_0 := v.Args[0] 4170 if v_0.Op != OpARM64MOVDconst { 4171 break 4172 } 4173 x := v_0.AuxInt 4174 if !(x < y && uint64(x) < uint64(y)) { 4175 break 4176 } 4177 v.reset(OpARM64FlagLT_ULT) 4178 return true 4179 } 4180 // match: (CMPconst (MOVDconst [x]) [y]) 4181 // cond: x<y && uint64(x)>uint64(y) 4182 // result: (FlagLT_UGT) 4183 for { 4184 y := v.AuxInt 4185 v_0 := v.Args[0] 4186 if v_0.Op != OpARM64MOVDconst { 4187 break 4188 } 4189 x := v_0.AuxInt 4190 if !(x < y && uint64(x) > uint64(y)) { 4191 break 4192 } 4193 v.reset(OpARM64FlagLT_UGT) 4194 return true 4195 } 4196 // match: (CMPconst (MOVDconst [x]) [y]) 4197 // cond: x>y && uint64(x)<uint64(y) 4198 // result: (FlagGT_ULT) 4199 for { 4200 y := v.AuxInt 4201 v_0 := v.Args[0] 4202 if v_0.Op != OpARM64MOVDconst { 4203 break 4204 } 4205 x := v_0.AuxInt 4206 if !(x > y && uint64(x) < uint64(y)) { 4207 break 4208 } 4209 v.reset(OpARM64FlagGT_ULT) 4210 return true 4211 } 4212 // match: (CMPconst (MOVDconst [x]) [y]) 4213 // cond: x>y && uint64(x)>uint64(y) 4214 // result: (FlagGT_UGT) 4215 for { 4216 y := v.AuxInt 4217 v_0 := v.Args[0] 4218 if v_0.Op != OpARM64MOVDconst { 4219 break 4220 } 4221 x := v_0.AuxInt 4222 if !(x > y && uint64(x) > uint64(y)) { 4223 break 4224 } 4225 v.reset(OpARM64FlagGT_UGT) 4226 return true 4227 } 4228 // match: (CMPconst (MOVBUreg _) [c]) 4229 // cond: 0xff < c 4230 // result: (FlagLT_ULT) 4231 for { 4232 c := v.AuxInt 4233 v_0 := v.Args[0] 4234 if v_0.Op != OpARM64MOVBUreg { 4235 break 4236 } 4237 if !(0xff < c) { 4238 break 4239 } 4240 v.reset(OpARM64FlagLT_ULT) 4241 return true 4242 } 4243 // match: (CMPconst (MOVHUreg _) [c]) 4244 // cond: 0xffff < c 
4245 // result: (FlagLT_ULT) 4246 for { 4247 c := v.AuxInt 4248 v_0 := v.Args[0] 4249 if v_0.Op != OpARM64MOVHUreg { 4250 break 4251 } 4252 if !(0xffff < c) { 4253 break 4254 } 4255 v.reset(OpARM64FlagLT_ULT) 4256 return true 4257 } 4258 // match: (CMPconst (MOVWUreg _) [c]) 4259 // cond: 0xffffffff < c 4260 // result: (FlagLT_ULT) 4261 for { 4262 c := v.AuxInt 4263 v_0 := v.Args[0] 4264 if v_0.Op != OpARM64MOVWUreg { 4265 break 4266 } 4267 if !(0xffffffff < c) { 4268 break 4269 } 4270 v.reset(OpARM64FlagLT_ULT) 4271 return true 4272 } 4273 // match: (CMPconst (ANDconst _ [m]) [n]) 4274 // cond: 0 <= m && m < n 4275 // result: (FlagLT_ULT) 4276 for { 4277 n := v.AuxInt 4278 v_0 := v.Args[0] 4279 if v_0.Op != OpARM64ANDconst { 4280 break 4281 } 4282 m := v_0.AuxInt 4283 if !(0 <= m && m < n) { 4284 break 4285 } 4286 v.reset(OpARM64FlagLT_ULT) 4287 return true 4288 } 4289 // match: (CMPconst (SRLconst _ [c]) [n]) 4290 // cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n) 4291 // result: (FlagLT_ULT) 4292 for { 4293 n := v.AuxInt 4294 v_0 := v.Args[0] 4295 if v_0.Op != OpARM64SRLconst { 4296 break 4297 } 4298 c := v_0.AuxInt 4299 if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) { 4300 break 4301 } 4302 v.reset(OpARM64FlagLT_ULT) 4303 return true 4304 } 4305 return false 4306 } 4307 func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool { 4308 b := v.Block 4309 _ = b 4310 // match: (CMPshiftLL (MOVDconst [c]) x [d]) 4311 // cond: 4312 // result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d]))) 4313 for { 4314 d := v.AuxInt 4315 _ = v.Args[1] 4316 v_0 := v.Args[0] 4317 if v_0.Op != OpARM64MOVDconst { 4318 break 4319 } 4320 c := v_0.AuxInt 4321 x := v.Args[1] 4322 v.reset(OpARM64InvertFlags) 4323 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 4324 v0.AuxInt = c 4325 v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 4326 v1.AuxInt = d 4327 v1.AddArg(x) 4328 v0.AddArg(v1) 4329 v.AddArg(v0) 4330 return true 4331 } 4332 
// match: (CMPshiftLL x (MOVDconst [c]) [d]) 4333 // cond: 4334 // result: (CMPconst x [int64(uint64(c)<<uint64(d))]) 4335 for { 4336 d := v.AuxInt 4337 _ = v.Args[1] 4338 x := v.Args[0] 4339 v_1 := v.Args[1] 4340 if v_1.Op != OpARM64MOVDconst { 4341 break 4342 } 4343 c := v_1.AuxInt 4344 v.reset(OpARM64CMPconst) 4345 v.AuxInt = int64(uint64(c) << uint64(d)) 4346 v.AddArg(x) 4347 return true 4348 } 4349 return false 4350 } 4351 func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool { 4352 b := v.Block 4353 _ = b 4354 // match: (CMPshiftRA (MOVDconst [c]) x [d]) 4355 // cond: 4356 // result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d]))) 4357 for { 4358 d := v.AuxInt 4359 _ = v.Args[1] 4360 v_0 := v.Args[0] 4361 if v_0.Op != OpARM64MOVDconst { 4362 break 4363 } 4364 c := v_0.AuxInt 4365 x := v.Args[1] 4366 v.reset(OpARM64InvertFlags) 4367 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 4368 v0.AuxInt = c 4369 v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 4370 v1.AuxInt = d 4371 v1.AddArg(x) 4372 v0.AddArg(v1) 4373 v.AddArg(v0) 4374 return true 4375 } 4376 // match: (CMPshiftRA x (MOVDconst [c]) [d]) 4377 // cond: 4378 // result: (CMPconst x [c>>uint64(d)]) 4379 for { 4380 d := v.AuxInt 4381 _ = v.Args[1] 4382 x := v.Args[0] 4383 v_1 := v.Args[1] 4384 if v_1.Op != OpARM64MOVDconst { 4385 break 4386 } 4387 c := v_1.AuxInt 4388 v.reset(OpARM64CMPconst) 4389 v.AuxInt = c >> uint64(d) 4390 v.AddArg(x) 4391 return true 4392 } 4393 return false 4394 } 4395 func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool { 4396 b := v.Block 4397 _ = b 4398 // match: (CMPshiftRL (MOVDconst [c]) x [d]) 4399 // cond: 4400 // result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d]))) 4401 for { 4402 d := v.AuxInt 4403 _ = v.Args[1] 4404 v_0 := v.Args[0] 4405 if v_0.Op != OpARM64MOVDconst { 4406 break 4407 } 4408 c := v_0.AuxInt 4409 x := v.Args[1] 4410 v.reset(OpARM64InvertFlags) 4411 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 4412 
v0.AuxInt = c 4413 v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 4414 v1.AuxInt = d 4415 v1.AddArg(x) 4416 v0.AddArg(v1) 4417 v.AddArg(v0) 4418 return true 4419 } 4420 // match: (CMPshiftRL x (MOVDconst [c]) [d]) 4421 // cond: 4422 // result: (CMPconst x [int64(uint64(c)>>uint64(d))]) 4423 for { 4424 d := v.AuxInt 4425 _ = v.Args[1] 4426 x := v.Args[0] 4427 v_1 := v.Args[1] 4428 if v_1.Op != OpARM64MOVDconst { 4429 break 4430 } 4431 c := v_1.AuxInt 4432 v.reset(OpARM64CMPconst) 4433 v.AuxInt = int64(uint64(c) >> uint64(d)) 4434 v.AddArg(x) 4435 return true 4436 } 4437 return false 4438 } 4439 func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool { 4440 // match: (CSEL {cc} x (MOVDconst [0]) flag) 4441 // cond: 4442 // result: (CSEL0 {cc} x flag) 4443 for { 4444 cc := v.Aux 4445 _ = v.Args[2] 4446 x := v.Args[0] 4447 v_1 := v.Args[1] 4448 if v_1.Op != OpARM64MOVDconst { 4449 break 4450 } 4451 if v_1.AuxInt != 0 { 4452 break 4453 } 4454 flag := v.Args[2] 4455 v.reset(OpARM64CSEL0) 4456 v.Aux = cc 4457 v.AddArg(x) 4458 v.AddArg(flag) 4459 return true 4460 } 4461 // match: (CSEL {cc} (MOVDconst [0]) y flag) 4462 // cond: 4463 // result: (CSEL0 {arm64Negate(cc.(Op))} y flag) 4464 for { 4465 cc := v.Aux 4466 _ = v.Args[2] 4467 v_0 := v.Args[0] 4468 if v_0.Op != OpARM64MOVDconst { 4469 break 4470 } 4471 if v_0.AuxInt != 0 { 4472 break 4473 } 4474 y := v.Args[1] 4475 flag := v.Args[2] 4476 v.reset(OpARM64CSEL0) 4477 v.Aux = arm64Negate(cc.(Op)) 4478 v.AddArg(y) 4479 v.AddArg(flag) 4480 return true 4481 } 4482 // match: (CSEL {cc} x y (InvertFlags cmp)) 4483 // cond: 4484 // result: (CSEL {arm64Invert(cc.(Op))} x y cmp) 4485 for { 4486 cc := v.Aux 4487 _ = v.Args[2] 4488 x := v.Args[0] 4489 y := v.Args[1] 4490 v_2 := v.Args[2] 4491 if v_2.Op != OpARM64InvertFlags { 4492 break 4493 } 4494 cmp := v_2.Args[0] 4495 v.reset(OpARM64CSEL) 4496 v.Aux = arm64Invert(cc.(Op)) 4497 v.AddArg(x) 4498 v.AddArg(y) 4499 v.AddArg(cmp) 4500 return true 4501 } 4502 // match: (CSEL {cc} x _ 
flag) 4503 // cond: ccARM64Eval(cc, flag) > 0 4504 // result: x 4505 for { 4506 cc := v.Aux 4507 _ = v.Args[2] 4508 x := v.Args[0] 4509 flag := v.Args[2] 4510 if !(ccARM64Eval(cc, flag) > 0) { 4511 break 4512 } 4513 v.reset(OpCopy) 4514 v.Type = x.Type 4515 v.AddArg(x) 4516 return true 4517 } 4518 // match: (CSEL {cc} _ y flag) 4519 // cond: ccARM64Eval(cc, flag) < 0 4520 // result: y 4521 for { 4522 cc := v.Aux 4523 _ = v.Args[2] 4524 y := v.Args[1] 4525 flag := v.Args[2] 4526 if !(ccARM64Eval(cc, flag) < 0) { 4527 break 4528 } 4529 v.reset(OpCopy) 4530 v.Type = y.Type 4531 v.AddArg(y) 4532 return true 4533 } 4534 // match: (CSEL {cc} x y (CMPWconst [0] bool)) 4535 // cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil 4536 // result: (CSEL {bool.Op} x y flagArg(bool)) 4537 for { 4538 cc := v.Aux 4539 _ = v.Args[2] 4540 x := v.Args[0] 4541 y := v.Args[1] 4542 v_2 := v.Args[2] 4543 if v_2.Op != OpARM64CMPWconst { 4544 break 4545 } 4546 if v_2.AuxInt != 0 { 4547 break 4548 } 4549 bool := v_2.Args[0] 4550 if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) { 4551 break 4552 } 4553 v.reset(OpARM64CSEL) 4554 v.Aux = bool.Op 4555 v.AddArg(x) 4556 v.AddArg(y) 4557 v.AddArg(flagArg(bool)) 4558 return true 4559 } 4560 // match: (CSEL {cc} x y (CMPWconst [0] bool)) 4561 // cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil 4562 // result: (CSEL {arm64Negate(bool.Op)} x y flagArg(bool)) 4563 for { 4564 cc := v.Aux 4565 _ = v.Args[2] 4566 x := v.Args[0] 4567 y := v.Args[1] 4568 v_2 := v.Args[2] 4569 if v_2.Op != OpARM64CMPWconst { 4570 break 4571 } 4572 if v_2.AuxInt != 0 { 4573 break 4574 } 4575 bool := v_2.Args[0] 4576 if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) { 4577 break 4578 } 4579 v.reset(OpARM64CSEL) 4580 v.Aux = arm64Negate(bool.Op) 4581 v.AddArg(x) 4582 v.AddArg(y) 4583 v.AddArg(flagArg(bool)) 4584 return true 4585 } 4586 return false 4587 } 4588 func rewriteValueARM64_OpARM64CSEL0_0(v *Value) bool { 4589 // match: (CSEL0 {cc} x (InvertFlags 
cmp)) 4590 // cond: 4591 // result: (CSEL0 {arm64Invert(cc.(Op))} x cmp) 4592 for { 4593 cc := v.Aux 4594 _ = v.Args[1] 4595 x := v.Args[0] 4596 v_1 := v.Args[1] 4597 if v_1.Op != OpARM64InvertFlags { 4598 break 4599 } 4600 cmp := v_1.Args[0] 4601 v.reset(OpARM64CSEL0) 4602 v.Aux = arm64Invert(cc.(Op)) 4603 v.AddArg(x) 4604 v.AddArg(cmp) 4605 return true 4606 } 4607 // match: (CSEL0 {cc} x flag) 4608 // cond: ccARM64Eval(cc, flag) > 0 4609 // result: x 4610 for { 4611 cc := v.Aux 4612 _ = v.Args[1] 4613 x := v.Args[0] 4614 flag := v.Args[1] 4615 if !(ccARM64Eval(cc, flag) > 0) { 4616 break 4617 } 4618 v.reset(OpCopy) 4619 v.Type = x.Type 4620 v.AddArg(x) 4621 return true 4622 } 4623 // match: (CSEL0 {cc} _ flag) 4624 // cond: ccARM64Eval(cc, flag) < 0 4625 // result: (MOVDconst [0]) 4626 for { 4627 cc := v.Aux 4628 _ = v.Args[1] 4629 flag := v.Args[1] 4630 if !(ccARM64Eval(cc, flag) < 0) { 4631 break 4632 } 4633 v.reset(OpARM64MOVDconst) 4634 v.AuxInt = 0 4635 return true 4636 } 4637 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 4638 // cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil 4639 // result: (CSEL0 {bool.Op} x flagArg(bool)) 4640 for { 4641 cc := v.Aux 4642 _ = v.Args[1] 4643 x := v.Args[0] 4644 v_1 := v.Args[1] 4645 if v_1.Op != OpARM64CMPWconst { 4646 break 4647 } 4648 if v_1.AuxInt != 0 { 4649 break 4650 } 4651 bool := v_1.Args[0] 4652 if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) { 4653 break 4654 } 4655 v.reset(OpARM64CSEL0) 4656 v.Aux = bool.Op 4657 v.AddArg(x) 4658 v.AddArg(flagArg(bool)) 4659 return true 4660 } 4661 // match: (CSEL0 {cc} x (CMPWconst [0] bool)) 4662 // cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil 4663 // result: (CSEL0 {arm64Negate(bool.Op)} x flagArg(bool)) 4664 for { 4665 cc := v.Aux 4666 _ = v.Args[1] 4667 x := v.Args[0] 4668 v_1 := v.Args[1] 4669 if v_1.Op != OpARM64CMPWconst { 4670 break 4671 } 4672 if v_1.AuxInt != 0 { 4673 break 4674 } 4675 bool := v_1.Args[0] 4676 if !(cc.(Op) == OpARM64Equal && 
flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.Aux = arm64Negate(bool.Op)
		v.AddArg(x)
		v.AddArg(flagArg(bool))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64DIV_0 constant-folds a signed 64-bit division
// whose operands are both MOVDconst.
//
// NOTE(review): this file is generated from gen/ARM64.rules; the
// "cond: d != 0" guard added below must also be added to the DIV rule in the
// rules file or regeneration will drop it. Without the guard,
// v.AuxInt = c / d makes the compiler itself panic with a divide-by-zero
// while compiling source that contains a constant zero divisor; the correct
// behavior is to leave the division un-folded so it traps at run time.
// (c / -1 needs no guard: the Go spec defines MinInt64 / -1 == MinInt64.)
func rewriteValueARM64_OpARM64DIV_0(v *Value) bool {
	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
	// cond: d != 0
	// result: (MOVDconst [c/d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c / d
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64DIVW_0 constant-folds a signed 32-bit division
// whose operands are both MOVDconst. The fold is performed in 32-bit
// arithmetic and the quotient sign-extended back into the 64-bit AuxInt.
//
// NOTE(review): same generated-file caveat as DIV above. The guard checks
// int32(d) != 0 rather than d != 0 because only the low 32 bits of d
// participate in the division, so d may be nonzero while int32(d) is zero.
func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool {
	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
	// cond: int32(d) != 0
	// result: (MOVDconst [int64(int32(c)/int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		if !(int32(d) != 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c) / int32(d))
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64EON_0 simplifies EON (x XOR ^y, per the first
// rule below): a constant second operand becomes an XORconst of the
// complemented constant, EON x x folds to all-ones, and a constant-shifted
// second operand folds into the EONshift* forms when the shift value is
// otherwise dead (clobberIfDead).
func rewriteValueARM64_OpARM64EON_0(v *Value) bool {
	// match: (EON x (MOVDconst [c]))
	// cond:
	// result: (XORconst [^c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (EON x x)
	// cond:
	// result: (MOVDconst [-1])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (EON x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 :=
v.Args[0] 4767 x1 := v.Args[1] 4768 if x1.Op != OpARM64SLLconst { 4769 break 4770 } 4771 c := x1.AuxInt 4772 y := x1.Args[0] 4773 if !(clobberIfDead(x1)) { 4774 break 4775 } 4776 v.reset(OpARM64EONshiftLL) 4777 v.AuxInt = c 4778 v.AddArg(x0) 4779 v.AddArg(y) 4780 return true 4781 } 4782 // match: (EON x0 x1:(SRLconst [c] y)) 4783 // cond: clobberIfDead(x1) 4784 // result: (EONshiftRL x0 y [c]) 4785 for { 4786 _ = v.Args[1] 4787 x0 := v.Args[0] 4788 x1 := v.Args[1] 4789 if x1.Op != OpARM64SRLconst { 4790 break 4791 } 4792 c := x1.AuxInt 4793 y := x1.Args[0] 4794 if !(clobberIfDead(x1)) { 4795 break 4796 } 4797 v.reset(OpARM64EONshiftRL) 4798 v.AuxInt = c 4799 v.AddArg(x0) 4800 v.AddArg(y) 4801 return true 4802 } 4803 // match: (EON x0 x1:(SRAconst [c] y)) 4804 // cond: clobberIfDead(x1) 4805 // result: (EONshiftRA x0 y [c]) 4806 for { 4807 _ = v.Args[1] 4808 x0 := v.Args[0] 4809 x1 := v.Args[1] 4810 if x1.Op != OpARM64SRAconst { 4811 break 4812 } 4813 c := x1.AuxInt 4814 y := x1.Args[0] 4815 if !(clobberIfDead(x1)) { 4816 break 4817 } 4818 v.reset(OpARM64EONshiftRA) 4819 v.AuxInt = c 4820 v.AddArg(x0) 4821 v.AddArg(y) 4822 return true 4823 } 4824 return false 4825 } 4826 func rewriteValueARM64_OpARM64EONshiftLL_0(v *Value) bool { 4827 // match: (EONshiftLL x (MOVDconst [c]) [d]) 4828 // cond: 4829 // result: (XORconst x [^int64(uint64(c)<<uint64(d))]) 4830 for { 4831 d := v.AuxInt 4832 _ = v.Args[1] 4833 x := v.Args[0] 4834 v_1 := v.Args[1] 4835 if v_1.Op != OpARM64MOVDconst { 4836 break 4837 } 4838 c := v_1.AuxInt 4839 v.reset(OpARM64XORconst) 4840 v.AuxInt = ^int64(uint64(c) << uint64(d)) 4841 v.AddArg(x) 4842 return true 4843 } 4844 // match: (EONshiftLL x (SLLconst x [c]) [d]) 4845 // cond: c==d 4846 // result: (MOVDconst [-1]) 4847 for { 4848 d := v.AuxInt 4849 _ = v.Args[1] 4850 x := v.Args[0] 4851 v_1 := v.Args[1] 4852 if v_1.Op != OpARM64SLLconst { 4853 break 4854 } 4855 c := v_1.AuxInt 4856 if x != v_1.Args[0] { 4857 break 4858 } 4859 if !(c == d) { 4860 
break 4861 } 4862 v.reset(OpARM64MOVDconst) 4863 v.AuxInt = -1 4864 return true 4865 } 4866 return false 4867 } 4868 func rewriteValueARM64_OpARM64EONshiftRA_0(v *Value) bool { 4869 // match: (EONshiftRA x (MOVDconst [c]) [d]) 4870 // cond: 4871 // result: (XORconst x [^(c>>uint64(d))]) 4872 for { 4873 d := v.AuxInt 4874 _ = v.Args[1] 4875 x := v.Args[0] 4876 v_1 := v.Args[1] 4877 if v_1.Op != OpARM64MOVDconst { 4878 break 4879 } 4880 c := v_1.AuxInt 4881 v.reset(OpARM64XORconst) 4882 v.AuxInt = ^(c >> uint64(d)) 4883 v.AddArg(x) 4884 return true 4885 } 4886 // match: (EONshiftRA x (SRAconst x [c]) [d]) 4887 // cond: c==d 4888 // result: (MOVDconst [-1]) 4889 for { 4890 d := v.AuxInt 4891 _ = v.Args[1] 4892 x := v.Args[0] 4893 v_1 := v.Args[1] 4894 if v_1.Op != OpARM64SRAconst { 4895 break 4896 } 4897 c := v_1.AuxInt 4898 if x != v_1.Args[0] { 4899 break 4900 } 4901 if !(c == d) { 4902 break 4903 } 4904 v.reset(OpARM64MOVDconst) 4905 v.AuxInt = -1 4906 return true 4907 } 4908 return false 4909 } 4910 func rewriteValueARM64_OpARM64EONshiftRL_0(v *Value) bool { 4911 // match: (EONshiftRL x (MOVDconst [c]) [d]) 4912 // cond: 4913 // result: (XORconst x [^int64(uint64(c)>>uint64(d))]) 4914 for { 4915 d := v.AuxInt 4916 _ = v.Args[1] 4917 x := v.Args[0] 4918 v_1 := v.Args[1] 4919 if v_1.Op != OpARM64MOVDconst { 4920 break 4921 } 4922 c := v_1.AuxInt 4923 v.reset(OpARM64XORconst) 4924 v.AuxInt = ^int64(uint64(c) >> uint64(d)) 4925 v.AddArg(x) 4926 return true 4927 } 4928 // match: (EONshiftRL x (SRLconst x [c]) [d]) 4929 // cond: c==d 4930 // result: (MOVDconst [-1]) 4931 for { 4932 d := v.AuxInt 4933 _ = v.Args[1] 4934 x := v.Args[0] 4935 v_1 := v.Args[1] 4936 if v_1.Op != OpARM64SRLconst { 4937 break 4938 } 4939 c := v_1.AuxInt 4940 if x != v_1.Args[0] { 4941 break 4942 } 4943 if !(c == d) { 4944 break 4945 } 4946 v.reset(OpARM64MOVDconst) 4947 v.AuxInt = -1 4948 return true 4949 } 4950 return false 4951 } 4952 func rewriteValueARM64_OpARM64Equal_0(v *Value) bool { 
	// Equal over a statically-known flags value folds to a constant boolean:
	// 1 only for FlagEQ; every strict less/greater flag state means "not equal".
	// match: (Equal (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (Equal (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// Equality is symmetric, so an operand swap recorded by InvertFlags does
	// not change the result: drop the inversion.
	// match: (Equal (InvertFlags x))
	// cond:
	// result: (Equal x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FADDD_0 fuses a double-precision add with an
// adjacent FMULD/FNMULD into the FMADDD/FMSUBD fused forms.
// NOTE(review): fusing rounds once instead of twice, so FP results may differ
// in the last bit — presumably sanctioned by gen/ARM64.rules; confirm there.
func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool {
	// match: (FADDD a (FMULD x y))
	// cond:
	// result: (FMADDD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMULD {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMADDD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDD (FMULD x y) a)
	// cond:
	// result:
(FMADDD a x y) 5051 for { 5052 _ = v.Args[1] 5053 v_0 := v.Args[0] 5054 if v_0.Op != OpARM64FMULD { 5055 break 5056 } 5057 _ = v_0.Args[1] 5058 x := v_0.Args[0] 5059 y := v_0.Args[1] 5060 a := v.Args[1] 5061 v.reset(OpARM64FMADDD) 5062 v.AddArg(a) 5063 v.AddArg(x) 5064 v.AddArg(y) 5065 return true 5066 } 5067 // match: (FADDD a (FNMULD x y)) 5068 // cond: 5069 // result: (FMSUBD a x y) 5070 for { 5071 _ = v.Args[1] 5072 a := v.Args[0] 5073 v_1 := v.Args[1] 5074 if v_1.Op != OpARM64FNMULD { 5075 break 5076 } 5077 _ = v_1.Args[1] 5078 x := v_1.Args[0] 5079 y := v_1.Args[1] 5080 v.reset(OpARM64FMSUBD) 5081 v.AddArg(a) 5082 v.AddArg(x) 5083 v.AddArg(y) 5084 return true 5085 } 5086 // match: (FADDD (FNMULD x y) a) 5087 // cond: 5088 // result: (FMSUBD a x y) 5089 for { 5090 _ = v.Args[1] 5091 v_0 := v.Args[0] 5092 if v_0.Op != OpARM64FNMULD { 5093 break 5094 } 5095 _ = v_0.Args[1] 5096 x := v_0.Args[0] 5097 y := v_0.Args[1] 5098 a := v.Args[1] 5099 v.reset(OpARM64FMSUBD) 5100 v.AddArg(a) 5101 v.AddArg(x) 5102 v.AddArg(y) 5103 return true 5104 } 5105 return false 5106 } 5107 func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool { 5108 // match: (FADDS a (FMULS x y)) 5109 // cond: 5110 // result: (FMADDS a x y) 5111 for { 5112 _ = v.Args[1] 5113 a := v.Args[0] 5114 v_1 := v.Args[1] 5115 if v_1.Op != OpARM64FMULS { 5116 break 5117 } 5118 _ = v_1.Args[1] 5119 x := v_1.Args[0] 5120 y := v_1.Args[1] 5121 v.reset(OpARM64FMADDS) 5122 v.AddArg(a) 5123 v.AddArg(x) 5124 v.AddArg(y) 5125 return true 5126 } 5127 // match: (FADDS (FMULS x y) a) 5128 // cond: 5129 // result: (FMADDS a x y) 5130 for { 5131 _ = v.Args[1] 5132 v_0 := v.Args[0] 5133 if v_0.Op != OpARM64FMULS { 5134 break 5135 } 5136 _ = v_0.Args[1] 5137 x := v_0.Args[0] 5138 y := v_0.Args[1] 5139 a := v.Args[1] 5140 v.reset(OpARM64FMADDS) 5141 v.AddArg(a) 5142 v.AddArg(x) 5143 v.AddArg(y) 5144 return true 5145 } 5146 // match: (FADDS a (FNMULS x y)) 5147 // cond: 5148 // result: (FMSUBS a x y) 5149 for { 5150 _ = v.Args[1] 
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FNMULS {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDS (FNMULS x y) a)
	// cond:
	// result: (FMSUBS a x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FNMULS {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDfpgp_0 rewrites an FP-to-GP register move
// (per the op name — fpgp) of a function argument into a fresh Arg of the
// destination type, materialized in the function's entry block
// (the "@b.Func.Entry" form below), so the value is simply reloaded rather
// than moved between register files.
func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// cond:
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpArg {
			break
		}
		off := v_0.AuxInt
		sym := v_0.Aux
		// Re-create the Arg in the entry block and turn v into a copy of it.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDgpfp_0 is the mirror of FMOVDfpgp above: a
// GP-to-FP move of a function argument becomes an Arg of the FP type,
// materialized in the entry block.
func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// cond:
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpArg {
			break
		}
		off := v_0.AuxInt
		sym := v_0.Aux
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDload_0 simplifies double-precision FP loads:
// store-to-load forwarding through a same-address MOVDstore, folding of
// ADDconst / MOVDaddr address arithmetic into the load's offset and symbol,
// and conversion of an ADD base to the indexed load form.
func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// cond:
	// result: (FMOVDgpfp val)
	for
{ 5243 off := v.AuxInt 5244 sym := v.Aux 5245 _ = v.Args[1] 5246 ptr := v.Args[0] 5247 v_1 := v.Args[1] 5248 if v_1.Op != OpARM64MOVDstore { 5249 break 5250 } 5251 if v_1.AuxInt != off { 5252 break 5253 } 5254 if v_1.Aux != sym { 5255 break 5256 } 5257 _ = v_1.Args[2] 5258 if ptr != v_1.Args[0] { 5259 break 5260 } 5261 val := v_1.Args[1] 5262 v.reset(OpARM64FMOVDgpfp) 5263 v.AddArg(val) 5264 return true 5265 } 5266 // match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 5267 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5268 // result: (FMOVDload [off1+off2] {sym} ptr mem) 5269 for { 5270 off1 := v.AuxInt 5271 sym := v.Aux 5272 _ = v.Args[1] 5273 v_0 := v.Args[0] 5274 if v_0.Op != OpARM64ADDconst { 5275 break 5276 } 5277 off2 := v_0.AuxInt 5278 ptr := v_0.Args[0] 5279 mem := v.Args[1] 5280 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5281 break 5282 } 5283 v.reset(OpARM64FMOVDload) 5284 v.AuxInt = off1 + off2 5285 v.Aux = sym 5286 v.AddArg(ptr) 5287 v.AddArg(mem) 5288 return true 5289 } 5290 // match: (FMOVDload [off] {sym} (ADD ptr idx) mem) 5291 // cond: off == 0 && sym == nil 5292 // result: (FMOVDloadidx ptr idx mem) 5293 for { 5294 off := v.AuxInt 5295 sym := v.Aux 5296 _ = v.Args[1] 5297 v_0 := v.Args[0] 5298 if v_0.Op != OpARM64ADD { 5299 break 5300 } 5301 _ = v_0.Args[1] 5302 ptr := v_0.Args[0] 5303 idx := v_0.Args[1] 5304 mem := v.Args[1] 5305 if !(off == 0 && sym == nil) { 5306 break 5307 } 5308 v.reset(OpARM64FMOVDloadidx) 5309 v.AddArg(ptr) 5310 v.AddArg(idx) 5311 v.AddArg(mem) 5312 return true 5313 } 5314 // match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5315 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5316 // result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5317 for { 5318 off1 := v.AuxInt 5319 sym1 := v.Aux 5320 _ = v.Args[1] 5321 v_0 := v.Args[0] 5322 if v_0.Op != OpARM64MOVDaddr { 5323 break 5324 
} 5325 off2 := v_0.AuxInt 5326 sym2 := v_0.Aux 5327 ptr := v_0.Args[0] 5328 mem := v.Args[1] 5329 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5330 break 5331 } 5332 v.reset(OpARM64FMOVDload) 5333 v.AuxInt = off1 + off2 5334 v.Aux = mergeSym(sym1, sym2) 5335 v.AddArg(ptr) 5336 v.AddArg(mem) 5337 return true 5338 } 5339 return false 5340 } 5341 func rewriteValueARM64_OpARM64FMOVDloadidx_0(v *Value) bool { 5342 // match: (FMOVDloadidx ptr (MOVDconst [c]) mem) 5343 // cond: 5344 // result: (FMOVDload [c] ptr mem) 5345 for { 5346 _ = v.Args[2] 5347 ptr := v.Args[0] 5348 v_1 := v.Args[1] 5349 if v_1.Op != OpARM64MOVDconst { 5350 break 5351 } 5352 c := v_1.AuxInt 5353 mem := v.Args[2] 5354 v.reset(OpARM64FMOVDload) 5355 v.AuxInt = c 5356 v.AddArg(ptr) 5357 v.AddArg(mem) 5358 return true 5359 } 5360 // match: (FMOVDloadidx (MOVDconst [c]) ptr mem) 5361 // cond: 5362 // result: (FMOVDload [c] ptr mem) 5363 for { 5364 _ = v.Args[2] 5365 v_0 := v.Args[0] 5366 if v_0.Op != OpARM64MOVDconst { 5367 break 5368 } 5369 c := v_0.AuxInt 5370 ptr := v.Args[1] 5371 mem := v.Args[2] 5372 v.reset(OpARM64FMOVDload) 5373 v.AuxInt = c 5374 v.AddArg(ptr) 5375 v.AddArg(mem) 5376 return true 5377 } 5378 return false 5379 } 5380 func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool { 5381 b := v.Block 5382 _ = b 5383 config := b.Func.Config 5384 _ = config 5385 // match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem) 5386 // cond: 5387 // result: (MOVDstore [off] {sym} ptr val mem) 5388 for { 5389 off := v.AuxInt 5390 sym := v.Aux 5391 _ = v.Args[2] 5392 ptr := v.Args[0] 5393 v_1 := v.Args[1] 5394 if v_1.Op != OpARM64FMOVDgpfp { 5395 break 5396 } 5397 val := v_1.Args[0] 5398 mem := v.Args[2] 5399 v.reset(OpARM64MOVDstore) 5400 v.AuxInt = off 5401 v.Aux = sym 5402 v.AddArg(ptr) 5403 v.AddArg(val) 5404 v.AddArg(mem) 5405 return true 5406 } 5407 // match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5408 // cond: 
is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5409 // result: (FMOVDstore [off1+off2] {sym} ptr val mem) 5410 for { 5411 off1 := v.AuxInt 5412 sym := v.Aux 5413 _ = v.Args[2] 5414 v_0 := v.Args[0] 5415 if v_0.Op != OpARM64ADDconst { 5416 break 5417 } 5418 off2 := v_0.AuxInt 5419 ptr := v_0.Args[0] 5420 val := v.Args[1] 5421 mem := v.Args[2] 5422 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5423 break 5424 } 5425 v.reset(OpARM64FMOVDstore) 5426 v.AuxInt = off1 + off2 5427 v.Aux = sym 5428 v.AddArg(ptr) 5429 v.AddArg(val) 5430 v.AddArg(mem) 5431 return true 5432 } 5433 // match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem) 5434 // cond: off == 0 && sym == nil 5435 // result: (FMOVDstoreidx ptr idx val mem) 5436 for { 5437 off := v.AuxInt 5438 sym := v.Aux 5439 _ = v.Args[2] 5440 v_0 := v.Args[0] 5441 if v_0.Op != OpARM64ADD { 5442 break 5443 } 5444 _ = v_0.Args[1] 5445 ptr := v_0.Args[0] 5446 idx := v_0.Args[1] 5447 val := v.Args[1] 5448 mem := v.Args[2] 5449 if !(off == 0 && sym == nil) { 5450 break 5451 } 5452 v.reset(OpARM64FMOVDstoreidx) 5453 v.AddArg(ptr) 5454 v.AddArg(idx) 5455 v.AddArg(val) 5456 v.AddArg(mem) 5457 return true 5458 } 5459 // match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5460 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5461 // result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5462 for { 5463 off1 := v.AuxInt 5464 sym1 := v.Aux 5465 _ = v.Args[2] 5466 v_0 := v.Args[0] 5467 if v_0.Op != OpARM64MOVDaddr { 5468 break 5469 } 5470 off2 := v_0.AuxInt 5471 sym2 := v_0.Aux 5472 ptr := v_0.Args[0] 5473 val := v.Args[1] 5474 mem := v.Args[2] 5475 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5476 break 5477 } 5478 v.reset(OpARM64FMOVDstore) 5479 v.AuxInt = off1 + off2 5480 v.Aux = mergeSym(sym1, sym2) 5481 v.AddArg(ptr) 5482 v.AddArg(val) 5483 
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVDstoreidx_0 folds a constant index (or a
// constant base — the addition is commutative) of an indexed
// double-precision FP store into the immediate offset of a plain FMOVDstore.
func rewriteValueARM64_OpARM64FMOVDstoreidx_0(v *Value) bool {
	// match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (FMOVDstore [c] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond:
	// result: (FMOVDstore [c] idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMOVSload_0 simplifies single-precision FP loads:
// store-to-load forwarding through a same-address MOVWstore, and folding of
// ADDconst / MOVDaddr address arithmetic into the load's offset and symbol.
func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
	// cond:
	// result: (FMOVSgpfp val)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWstore {
			break
		}
		if v_1.AuxInt != off {
			break
		}
		if v_1.Aux != sym {
			break
		}
		_ = v_1.Args[2]
		if ptr != v_1.Args[0] {
			break
		}
		val := v_1.Args[1]
		v.reset(OpARM64FMOVSgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVSload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
5570 v_0 := v.Args[0] 5571 if v_0.Op != OpARM64ADDconst { 5572 break 5573 } 5574 off2 := v_0.AuxInt 5575 ptr := v_0.Args[0] 5576 mem := v.Args[1] 5577 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5578 break 5579 } 5580 v.reset(OpARM64FMOVSload) 5581 v.AuxInt = off1 + off2 5582 v.Aux = sym 5583 v.AddArg(ptr) 5584 v.AddArg(mem) 5585 return true 5586 } 5587 // match: (FMOVSload [off] {sym} (ADD ptr idx) mem) 5588 // cond: off == 0 && sym == nil 5589 // result: (FMOVSloadidx ptr idx mem) 5590 for { 5591 off := v.AuxInt 5592 sym := v.Aux 5593 _ = v.Args[1] 5594 v_0 := v.Args[0] 5595 if v_0.Op != OpARM64ADD { 5596 break 5597 } 5598 _ = v_0.Args[1] 5599 ptr := v_0.Args[0] 5600 idx := v_0.Args[1] 5601 mem := v.Args[1] 5602 if !(off == 0 && sym == nil) { 5603 break 5604 } 5605 v.reset(OpARM64FMOVSloadidx) 5606 v.AddArg(ptr) 5607 v.AddArg(idx) 5608 v.AddArg(mem) 5609 return true 5610 } 5611 // match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 5612 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5613 // result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5614 for { 5615 off1 := v.AuxInt 5616 sym1 := v.Aux 5617 _ = v.Args[1] 5618 v_0 := v.Args[0] 5619 if v_0.Op != OpARM64MOVDaddr { 5620 break 5621 } 5622 off2 := v_0.AuxInt 5623 sym2 := v_0.Aux 5624 ptr := v_0.Args[0] 5625 mem := v.Args[1] 5626 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5627 break 5628 } 5629 v.reset(OpARM64FMOVSload) 5630 v.AuxInt = off1 + off2 5631 v.Aux = mergeSym(sym1, sym2) 5632 v.AddArg(ptr) 5633 v.AddArg(mem) 5634 return true 5635 } 5636 return false 5637 } 5638 func rewriteValueARM64_OpARM64FMOVSloadidx_0(v *Value) bool { 5639 // match: (FMOVSloadidx ptr (MOVDconst [c]) mem) 5640 // cond: 5641 // result: (FMOVSload [c] ptr mem) 5642 for { 5643 _ = v.Args[2] 5644 ptr := v.Args[0] 5645 v_1 := v.Args[1] 5646 if v_1.Op != OpARM64MOVDconst { 
5647 break 5648 } 5649 c := v_1.AuxInt 5650 mem := v.Args[2] 5651 v.reset(OpARM64FMOVSload) 5652 v.AuxInt = c 5653 v.AddArg(ptr) 5654 v.AddArg(mem) 5655 return true 5656 } 5657 // match: (FMOVSloadidx (MOVDconst [c]) ptr mem) 5658 // cond: 5659 // result: (FMOVSload [c] ptr mem) 5660 for { 5661 _ = v.Args[2] 5662 v_0 := v.Args[0] 5663 if v_0.Op != OpARM64MOVDconst { 5664 break 5665 } 5666 c := v_0.AuxInt 5667 ptr := v.Args[1] 5668 mem := v.Args[2] 5669 v.reset(OpARM64FMOVSload) 5670 v.AuxInt = c 5671 v.AddArg(ptr) 5672 v.AddArg(mem) 5673 return true 5674 } 5675 return false 5676 } 5677 func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool { 5678 b := v.Block 5679 _ = b 5680 config := b.Func.Config 5681 _ = config 5682 // match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem) 5683 // cond: 5684 // result: (MOVWstore [off] {sym} ptr val mem) 5685 for { 5686 off := v.AuxInt 5687 sym := v.Aux 5688 _ = v.Args[2] 5689 ptr := v.Args[0] 5690 v_1 := v.Args[1] 5691 if v_1.Op != OpARM64FMOVSgpfp { 5692 break 5693 } 5694 val := v_1.Args[0] 5695 mem := v.Args[2] 5696 v.reset(OpARM64MOVWstore) 5697 v.AuxInt = off 5698 v.Aux = sym 5699 v.AddArg(ptr) 5700 v.AddArg(val) 5701 v.AddArg(mem) 5702 return true 5703 } 5704 // match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem) 5705 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5706 // result: (FMOVSstore [off1+off2] {sym} ptr val mem) 5707 for { 5708 off1 := v.AuxInt 5709 sym := v.Aux 5710 _ = v.Args[2] 5711 v_0 := v.Args[0] 5712 if v_0.Op != OpARM64ADDconst { 5713 break 5714 } 5715 off2 := v_0.AuxInt 5716 ptr := v_0.Args[0] 5717 val := v.Args[1] 5718 mem := v.Args[2] 5719 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5720 break 5721 } 5722 v.reset(OpARM64FMOVSstore) 5723 v.AuxInt = off1 + off2 5724 v.Aux = sym 5725 v.AddArg(ptr) 5726 v.AddArg(val) 5727 v.AddArg(mem) 5728 return true 5729 } 5730 // match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem) 5731 // 
cond: off == 0 && sym == nil 5732 // result: (FMOVSstoreidx ptr idx val mem) 5733 for { 5734 off := v.AuxInt 5735 sym := v.Aux 5736 _ = v.Args[2] 5737 v_0 := v.Args[0] 5738 if v_0.Op != OpARM64ADD { 5739 break 5740 } 5741 _ = v_0.Args[1] 5742 ptr := v_0.Args[0] 5743 idx := v_0.Args[1] 5744 val := v.Args[1] 5745 mem := v.Args[2] 5746 if !(off == 0 && sym == nil) { 5747 break 5748 } 5749 v.reset(OpARM64FMOVSstoreidx) 5750 v.AddArg(ptr) 5751 v.AddArg(idx) 5752 v.AddArg(val) 5753 v.AddArg(mem) 5754 return true 5755 } 5756 // match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 5757 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 5758 // result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5759 for { 5760 off1 := v.AuxInt 5761 sym1 := v.Aux 5762 _ = v.Args[2] 5763 v_0 := v.Args[0] 5764 if v_0.Op != OpARM64MOVDaddr { 5765 break 5766 } 5767 off2 := v_0.AuxInt 5768 sym2 := v_0.Aux 5769 ptr := v_0.Args[0] 5770 val := v.Args[1] 5771 mem := v.Args[2] 5772 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 5773 break 5774 } 5775 v.reset(OpARM64FMOVSstore) 5776 v.AuxInt = off1 + off2 5777 v.Aux = mergeSym(sym1, sym2) 5778 v.AddArg(ptr) 5779 v.AddArg(val) 5780 v.AddArg(mem) 5781 return true 5782 } 5783 return false 5784 } 5785 func rewriteValueARM64_OpARM64FMOVSstoreidx_0(v *Value) bool { 5786 // match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem) 5787 // cond: 5788 // result: (FMOVSstore [c] ptr val mem) 5789 for { 5790 _ = v.Args[3] 5791 ptr := v.Args[0] 5792 v_1 := v.Args[1] 5793 if v_1.Op != OpARM64MOVDconst { 5794 break 5795 } 5796 c := v_1.AuxInt 5797 val := v.Args[2] 5798 mem := v.Args[3] 5799 v.reset(OpARM64FMOVSstore) 5800 v.AuxInt = c 5801 v.AddArg(ptr) 5802 v.AddArg(val) 5803 v.AddArg(mem) 5804 return true 5805 } 5806 // match: (FMOVSstoreidx (MOVDconst [c]) idx val mem) 5807 // cond: 5808 // result: (FMOVSstore [c] idx val mem) 5809 
for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMULD_0: a negated factor on either side turns a
// double multiply into a negated multiply:
// (FMULD (FNEGD x) y) -> (FNMULD x y); (FMULD y (FNEGD x)) -> (FNMULD x y).
func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool {
	for {
		_ = v.Args[1]
		neg := v.Args[0]
		if neg.Op != OpARM64FNEGD {
			break
		}
		x := neg.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FNMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	for {
		_ = v.Args[1]
		y := v.Args[0]
		neg := v.Args[1]
		if neg.Op != OpARM64FNEGD {
			break
		}
		x := neg.Args[0]
		v.reset(OpARM64FNMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FMULS_0: single-precision version of the FMULD
// rules: (FMULS (FNEGS x) y) / (FMULS y (FNEGS x)) -> (FNMULS x y).
func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool {
	for {
		_ = v.Args[1]
		neg := v.Args[0]
		if neg.Op != OpARM64FNEGS {
			break
		}
		x := neg.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FNMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	for {
		_ = v.Args[1]
		y := v.Args[0]
		neg := v.Args[1]
		if neg.Op != OpARM64FNEGS {
			break
		}
		x := neg.Args[0]
		v.reset(OpARM64FNMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FNEGD_0: negating a product toggles between
// FMULD and FNMULD.
func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool {
	prod := v.Args[0]
	switch prod.Op {
	case OpARM64FMULD:
		// (FNEGD (FMULD x y)) -> (FNMULD x y)
		_ = prod.Args[1]
		x := prod.Args[0]
		y := prod.Args[1]
		v.reset(OpARM64FNMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	case OpARM64FNMULD:
		// (FNEGD (FNMULD x y)) -> (FMULD x y)
		_ = prod.Args[1]
		x := prod.Args[0]
		y := prod.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FNEGS_0: negating a product toggles between
// FMULS and FNMULS.
func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool {
	prod := v.Args[0]
	switch prod.Op {
	case OpARM64FMULS:
		// (FNEGS (FMULS x y)) -> (FNMULS x y)
		_ = prod.Args[1]
		x := prod.Args[0]
		y := prod.Args[1]
		v.reset(OpARM64FNMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	case OpARM64FNMULS:
		// (FNEGS (FNMULS x y)) -> (FMULS x y)
		_ = prod.Args[1]
		x := prod.Args[0]
		y := prod.Args[1]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FNMULD_0: a negated factor cancels the negated
// multiply: (FNMULD (FNEGD x) y) / (FNMULD y (FNEGD x)) -> (FMULD x y).
func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool {
	for {
		_ = v.Args[1]
		neg := v.Args[0]
		if neg.Op != OpARM64FNEGD {
			break
		}
		x := neg.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	for {
		_ = v.Args[1]
		y := v.Args[0]
		neg := v.Args[1]
		if neg.Op != OpARM64FNEGD {
			break
		}
		x := neg.Args[0]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool {
	// match: (FNMULS (FNEGS x) y)
	// cond:
	//
result: (FMULS x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FNEGS {
			break
		}
		x := v_0.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FNMULS y (FNEGS x))
	// cond:
	// result: (FMULS x y)
	for {
		_ = v.Args[1]
		y := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FNEGS {
			break
		}
		x := v_1.Args[0]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FSUBD_0 folds a multiply into a subtract,
// producing the fused multiply-add family (rules tried in generated order):
//   (FSUBD a (FMULD x y))  -> (FMSUBD a x y)
//   (FSUBD (FMULD x y) a)  -> (FNMSUBD a x y)
//   (FSUBD a (FNMULD x y)) -> (FMADDD a x y)
//   (FSUBD (FNMULD x y) a) -> (FNMADDD a x y)
func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool {
	_ = v.Args[1]
	if m := v.Args[1]; m.Op == OpARM64FMULD {
		_ = m.Args[1]
		a := v.Args[0]
		x := m.Args[0]
		y := m.Args[1]
		v.reset(OpARM64FMSUBD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[0]; m.Op == OpARM64FMULD {
		_ = m.Args[1]
		x := m.Args[0]
		y := m.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FNMSUBD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[1]; m.Op == OpARM64FNMULD {
		_ = m.Args[1]
		a := v.Args[0]
		x := m.Args[0]
		y := m.Args[1]
		v.reset(OpARM64FMADDD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[0]; m.Op == OpARM64FNMULD {
		_ = m.Args[1]
		x := m.Args[0]
		y := m.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FNMADDD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64FSUBS_0: single-precision version of the FSUBD
// fused multiply-add rules.
func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool {
	_ = v.Args[1]
	if m := v.Args[1]; m.Op == OpARM64FMULS {
		_ = m.Args[1]
		a := v.Args[0]
		x := m.Args[0]
		y := m.Args[1]
		v.reset(OpARM64FMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[0]; m.Op == OpARM64FMULS {
		_ = m.Args[1]
		x := m.Args[0]
		y := m.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FNMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[1]; m.Op == OpARM64FNMULS {
		_ = m.Args[1]
		a := v.Args[0]
		x := m.Args[0]
		y := m.Args[1]
		v.reset(OpARM64FMADDS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	if m := v.Args[0]; m.Op == OpARM64FNMULS {
		_ = m.Args[1]
		x := m.Args[0]
		y := m.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FNMADDS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool {
	// match: (GreaterEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterEqual (InvertFlags x))
	// cond:
	// result: (LessEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessEqual)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64GreaterEqualU_0 constant-folds unsigned >= on a
// known flag value (EQ, LT_UGT, GT_UGT -> 1; LT_ULT, GT_ULT -> 0) and flips
// the comparison to unsigned <= through InvertFlags.
func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool {
	flag := v.Args[0]
	switch flag.Op {
	case OpARM64FlagEQ, OpARM64FlagLT_UGT, OpARM64FlagGT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	case OpARM64FlagLT_ULT, OpARM64FlagGT_ULT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	case OpARM64InvertFlags:
		x := flag.Args[0]
		v.reset(OpARM64LessEqualU)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool {
	// match: (GreaterThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (GreaterThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (GreaterThan (InvertFlags x))
	// cond:
	// result: (LessThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64LessThan)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64GreaterThanU_0 constant-folds unsigned > on a
// known flag value (LT_UGT, GT_UGT -> 1; EQ, LT_ULT, GT_ULT -> 0) and flips
// the comparison to unsigned < through InvertFlags.
func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool {
	flag := v.Args[0]
	switch flag.Op {
	case OpARM64FlagLT_UGT, OpARM64FlagGT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	case OpARM64FlagEQ, OpARM64FlagLT_ULT, OpARM64FlagGT_ULT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	case OpARM64InvertFlags:
		x := flag.Args[0]
		v.reset(OpARM64LessThanU)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessEqual_0 constant-folds signed <= on a known
// flag value (EQ, LT_ULT, LT_UGT -> 1; GT_ULT, GT_UGT -> 0) and flips the
// comparison to signed >= through InvertFlags.
func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool {
	flag := v.Args[0]
	switch flag.Op {
	case OpARM64FlagEQ, OpARM64FlagLT_ULT, OpARM64FlagLT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	case OpARM64FlagGT_ULT, OpARM64FlagGT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	case OpARM64InvertFlags:
		x := flag.Args[0]
		v.reset(OpARM64GreaterEqual)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool {
	// match: (LessEqualU (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 :=
v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessEqualU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessEqualU (InvertFlags x))
	// cond:
	// result: (GreaterEqualU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterEqualU)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64LessThan_0 constant-folds signed < on a known
// flag value (LT_ULT, LT_UGT -> 1; EQ, GT_ULT, GT_UGT -> 0) and flips the
// comparison to signed > through InvertFlags.
func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool {
	flag := v.Args[0]
	switch flag.Op {
	case OpARM64FlagLT_ULT, OpARM64FlagLT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	case OpARM64FlagEQ, OpARM64FlagGT_ULT, OpARM64FlagGT_UGT:
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	case OpARM64InvertFlags:
		x := flag.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool {
	// match: (LessThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
6773 v.reset(OpARM64MOVDconst) 6774 v.AuxInt = 1 6775 return true 6776 } 6777 // match: (LessThanU (FlagGT_UGT)) 6778 // cond: 6779 // result: (MOVDconst [0]) 6780 for { 6781 v_0 := v.Args[0] 6782 if v_0.Op != OpARM64FlagGT_UGT { 6783 break 6784 } 6785 v.reset(OpARM64MOVDconst) 6786 v.AuxInt = 0 6787 return true 6788 } 6789 // match: (LessThanU (InvertFlags x)) 6790 // cond: 6791 // result: (GreaterThanU x) 6792 for { 6793 v_0 := v.Args[0] 6794 if v_0.Op != OpARM64InvertFlags { 6795 break 6796 } 6797 x := v_0.Args[0] 6798 v.reset(OpARM64GreaterThanU) 6799 v.AddArg(x) 6800 return true 6801 } 6802 return false 6803 } 6804 func rewriteValueARM64_OpARM64MADD_0(v *Value) bool { 6805 b := v.Block 6806 _ = b 6807 // match: (MADD a x (MOVDconst [-1])) 6808 // cond: 6809 // result: (SUB a x) 6810 for { 6811 _ = v.Args[2] 6812 a := v.Args[0] 6813 x := v.Args[1] 6814 v_2 := v.Args[2] 6815 if v_2.Op != OpARM64MOVDconst { 6816 break 6817 } 6818 if v_2.AuxInt != -1 { 6819 break 6820 } 6821 v.reset(OpARM64SUB) 6822 v.AddArg(a) 6823 v.AddArg(x) 6824 return true 6825 } 6826 // match: (MADD a _ (MOVDconst [0])) 6827 // cond: 6828 // result: a 6829 for { 6830 _ = v.Args[2] 6831 a := v.Args[0] 6832 v_2 := v.Args[2] 6833 if v_2.Op != OpARM64MOVDconst { 6834 break 6835 } 6836 if v_2.AuxInt != 0 { 6837 break 6838 } 6839 v.reset(OpCopy) 6840 v.Type = a.Type 6841 v.AddArg(a) 6842 return true 6843 } 6844 // match: (MADD a x (MOVDconst [1])) 6845 // cond: 6846 // result: (ADD a x) 6847 for { 6848 _ = v.Args[2] 6849 a := v.Args[0] 6850 x := v.Args[1] 6851 v_2 := v.Args[2] 6852 if v_2.Op != OpARM64MOVDconst { 6853 break 6854 } 6855 if v_2.AuxInt != 1 { 6856 break 6857 } 6858 v.reset(OpARM64ADD) 6859 v.AddArg(a) 6860 v.AddArg(x) 6861 return true 6862 } 6863 // match: (MADD a x (MOVDconst [c])) 6864 // cond: isPowerOfTwo(c) 6865 // result: (ADDshiftLL a x [log2(c)]) 6866 for { 6867 _ = v.Args[2] 6868 a := v.Args[0] 6869 x := v.Args[1] 6870 v_2 := v.Args[2] 6871 if v_2.Op != OpARM64MOVDconst { 
6872 break 6873 } 6874 c := v_2.AuxInt 6875 if !(isPowerOfTwo(c)) { 6876 break 6877 } 6878 v.reset(OpARM64ADDshiftLL) 6879 v.AuxInt = log2(c) 6880 v.AddArg(a) 6881 v.AddArg(x) 6882 return true 6883 } 6884 // match: (MADD a x (MOVDconst [c])) 6885 // cond: isPowerOfTwo(c-1) && c>=3 6886 // result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)])) 6887 for { 6888 _ = v.Args[2] 6889 a := v.Args[0] 6890 x := v.Args[1] 6891 v_2 := v.Args[2] 6892 if v_2.Op != OpARM64MOVDconst { 6893 break 6894 } 6895 c := v_2.AuxInt 6896 if !(isPowerOfTwo(c-1) && c >= 3) { 6897 break 6898 } 6899 v.reset(OpARM64ADD) 6900 v.AddArg(a) 6901 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6902 v0.AuxInt = log2(c - 1) 6903 v0.AddArg(x) 6904 v0.AddArg(x) 6905 v.AddArg(v0) 6906 return true 6907 } 6908 // match: (MADD a x (MOVDconst [c])) 6909 // cond: isPowerOfTwo(c+1) && c>=7 6910 // result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)])) 6911 for { 6912 _ = v.Args[2] 6913 a := v.Args[0] 6914 x := v.Args[1] 6915 v_2 := v.Args[2] 6916 if v_2.Op != OpARM64MOVDconst { 6917 break 6918 } 6919 c := v_2.AuxInt 6920 if !(isPowerOfTwo(c+1) && c >= 7) { 6921 break 6922 } 6923 v.reset(OpARM64SUB) 6924 v.AddArg(a) 6925 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6926 v0.AuxInt = log2(c + 1) 6927 v0.AddArg(x) 6928 v0.AddArg(x) 6929 v.AddArg(v0) 6930 return true 6931 } 6932 // match: (MADD a x (MOVDconst [c])) 6933 // cond: c%3 == 0 && isPowerOfTwo(c/3) 6934 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 6935 for { 6936 _ = v.Args[2] 6937 a := v.Args[0] 6938 x := v.Args[1] 6939 v_2 := v.Args[2] 6940 if v_2.Op != OpARM64MOVDconst { 6941 break 6942 } 6943 c := v_2.AuxInt 6944 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 6945 break 6946 } 6947 v.reset(OpARM64SUBshiftLL) 6948 v.AuxInt = log2(c / 3) 6949 v.AddArg(a) 6950 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 6951 v0.AuxInt = 2 6952 v0.AddArg(x) 6953 v0.AddArg(x) 6954 v.AddArg(v0) 6955 return true 6956 } 6957 // match: (MADD a x 
(MOVDconst [c])) 6958 // cond: c%5 == 0 && isPowerOfTwo(c/5) 6959 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 6960 for { 6961 _ = v.Args[2] 6962 a := v.Args[0] 6963 x := v.Args[1] 6964 v_2 := v.Args[2] 6965 if v_2.Op != OpARM64MOVDconst { 6966 break 6967 } 6968 c := v_2.AuxInt 6969 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 6970 break 6971 } 6972 v.reset(OpARM64ADDshiftLL) 6973 v.AuxInt = log2(c / 5) 6974 v.AddArg(a) 6975 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 6976 v0.AuxInt = 2 6977 v0.AddArg(x) 6978 v0.AddArg(x) 6979 v.AddArg(v0) 6980 return true 6981 } 6982 // match: (MADD a x (MOVDconst [c])) 6983 // cond: c%7 == 0 && isPowerOfTwo(c/7) 6984 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 6985 for { 6986 _ = v.Args[2] 6987 a := v.Args[0] 6988 x := v.Args[1] 6989 v_2 := v.Args[2] 6990 if v_2.Op != OpARM64MOVDconst { 6991 break 6992 } 6993 c := v_2.AuxInt 6994 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 6995 break 6996 } 6997 v.reset(OpARM64SUBshiftLL) 6998 v.AuxInt = log2(c / 7) 6999 v.AddArg(a) 7000 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7001 v0.AuxInt = 3 7002 v0.AddArg(x) 7003 v0.AddArg(x) 7004 v.AddArg(v0) 7005 return true 7006 } 7007 // match: (MADD a x (MOVDconst [c])) 7008 // cond: c%9 == 0 && isPowerOfTwo(c/9) 7009 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 7010 for { 7011 _ = v.Args[2] 7012 a := v.Args[0] 7013 x := v.Args[1] 7014 v_2 := v.Args[2] 7015 if v_2.Op != OpARM64MOVDconst { 7016 break 7017 } 7018 c := v_2.AuxInt 7019 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 7020 break 7021 } 7022 v.reset(OpARM64ADDshiftLL) 7023 v.AuxInt = log2(c / 9) 7024 v.AddArg(a) 7025 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7026 v0.AuxInt = 3 7027 v0.AddArg(x) 7028 v0.AddArg(x) 7029 v.AddArg(v0) 7030 return true 7031 } 7032 return false 7033 } 7034 func rewriteValueARM64_OpARM64MADD_10(v *Value) bool { 7035 b := v.Block 7036 _ = b 7037 // match: (MADD a (MOVDconst [-1]) x) 7038 // 
cond:
	// result: (SUB a x)
	// This batch mirrors rewriteValueARM64_OpARM64MADD_0 with the constant
	// multiplier in argument position 1 instead of 2; the strength-reduction
	// cases are tried in the same order as the generated rules.
	_ = v.Args[2]
	a := v.Args[0]
	mc := v.Args[1]
	if mc.Op != OpARM64MOVDconst {
		return false
	}
	c := mc.AuxInt
	x := v.Args[2]
	switch {
	case c == -1:
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	case c == 0:
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	case c == 1:
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	case isPowerOfTwo(c):
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	case isPowerOfTwo(c-1) && c >= 3:
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	case isPowerOfTwo(c+1) && c >= 7:
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	case c%3 == 0 && isPowerOfTwo(c/3):
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	case c%5 == 0 && isPowerOfTwo(c/5):
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	case c%7 == 0 && isPowerOfTwo(c/7):
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	case c%9 == 0 && isPowerOfTwo(c/9):
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MADD_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD (MOVDconst [c]) x y)
	// cond:
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		y := v.Args[2]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (ADDconst [c*d] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
v.reset(OpARM64ADDconst) 7304 v.AuxInt = c * d 7305 v.AddArg(a) 7306 return true 7307 } 7308 return false 7309 } 7310 func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool { 7311 b := v.Block 7312 _ = b 7313 // match: (MADDW a x (MOVDconst [c])) 7314 // cond: int32(c)==-1 7315 // result: (SUB a x) 7316 for { 7317 _ = v.Args[2] 7318 a := v.Args[0] 7319 x := v.Args[1] 7320 v_2 := v.Args[2] 7321 if v_2.Op != OpARM64MOVDconst { 7322 break 7323 } 7324 c := v_2.AuxInt 7325 if !(int32(c) == -1) { 7326 break 7327 } 7328 v.reset(OpARM64SUB) 7329 v.AddArg(a) 7330 v.AddArg(x) 7331 return true 7332 } 7333 // match: (MADDW a _ (MOVDconst [c])) 7334 // cond: int32(c)==0 7335 // result: a 7336 for { 7337 _ = v.Args[2] 7338 a := v.Args[0] 7339 v_2 := v.Args[2] 7340 if v_2.Op != OpARM64MOVDconst { 7341 break 7342 } 7343 c := v_2.AuxInt 7344 if !(int32(c) == 0) { 7345 break 7346 } 7347 v.reset(OpCopy) 7348 v.Type = a.Type 7349 v.AddArg(a) 7350 return true 7351 } 7352 // match: (MADDW a x (MOVDconst [c])) 7353 // cond: int32(c)==1 7354 // result: (ADD a x) 7355 for { 7356 _ = v.Args[2] 7357 a := v.Args[0] 7358 x := v.Args[1] 7359 v_2 := v.Args[2] 7360 if v_2.Op != OpARM64MOVDconst { 7361 break 7362 } 7363 c := v_2.AuxInt 7364 if !(int32(c) == 1) { 7365 break 7366 } 7367 v.reset(OpARM64ADD) 7368 v.AddArg(a) 7369 v.AddArg(x) 7370 return true 7371 } 7372 // match: (MADDW a x (MOVDconst [c])) 7373 // cond: isPowerOfTwo(c) 7374 // result: (ADDshiftLL a x [log2(c)]) 7375 for { 7376 _ = v.Args[2] 7377 a := v.Args[0] 7378 x := v.Args[1] 7379 v_2 := v.Args[2] 7380 if v_2.Op != OpARM64MOVDconst { 7381 break 7382 } 7383 c := v_2.AuxInt 7384 if !(isPowerOfTwo(c)) { 7385 break 7386 } 7387 v.reset(OpARM64ADDshiftLL) 7388 v.AuxInt = log2(c) 7389 v.AddArg(a) 7390 v.AddArg(x) 7391 return true 7392 } 7393 // match: (MADDW a x (MOVDconst [c])) 7394 // cond: isPowerOfTwo(c-1) && int32(c)>=3 7395 // result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)])) 7396 for { 7397 _ = v.Args[2] 7398 a := v.Args[0] 
7399 x := v.Args[1] 7400 v_2 := v.Args[2] 7401 if v_2.Op != OpARM64MOVDconst { 7402 break 7403 } 7404 c := v_2.AuxInt 7405 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 7406 break 7407 } 7408 v.reset(OpARM64ADD) 7409 v.AddArg(a) 7410 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7411 v0.AuxInt = log2(c - 1) 7412 v0.AddArg(x) 7413 v0.AddArg(x) 7414 v.AddArg(v0) 7415 return true 7416 } 7417 // match: (MADDW a x (MOVDconst [c])) 7418 // cond: isPowerOfTwo(c+1) && int32(c)>=7 7419 // result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)])) 7420 for { 7421 _ = v.Args[2] 7422 a := v.Args[0] 7423 x := v.Args[1] 7424 v_2 := v.Args[2] 7425 if v_2.Op != OpARM64MOVDconst { 7426 break 7427 } 7428 c := v_2.AuxInt 7429 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 7430 break 7431 } 7432 v.reset(OpARM64SUB) 7433 v.AddArg(a) 7434 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7435 v0.AuxInt = log2(c + 1) 7436 v0.AddArg(x) 7437 v0.AddArg(x) 7438 v.AddArg(v0) 7439 return true 7440 } 7441 // match: (MADDW a x (MOVDconst [c])) 7442 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 7443 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 7444 for { 7445 _ = v.Args[2] 7446 a := v.Args[0] 7447 x := v.Args[1] 7448 v_2 := v.Args[2] 7449 if v_2.Op != OpARM64MOVDconst { 7450 break 7451 } 7452 c := v_2.AuxInt 7453 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 7454 break 7455 } 7456 v.reset(OpARM64SUBshiftLL) 7457 v.AuxInt = log2(c / 3) 7458 v.AddArg(a) 7459 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7460 v0.AuxInt = 2 7461 v0.AddArg(x) 7462 v0.AddArg(x) 7463 v.AddArg(v0) 7464 return true 7465 } 7466 // match: (MADDW a x (MOVDconst [c])) 7467 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 7468 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 7469 for { 7470 _ = v.Args[2] 7471 a := v.Args[0] 7472 x := v.Args[1] 7473 v_2 := v.Args[2] 7474 if v_2.Op != OpARM64MOVDconst { 7475 break 7476 } 7477 c := v_2.AuxInt 7478 if !(c%5 == 0 && 
isPowerOfTwo(c/5) && is32Bit(c)) { 7479 break 7480 } 7481 v.reset(OpARM64ADDshiftLL) 7482 v.AuxInt = log2(c / 5) 7483 v.AddArg(a) 7484 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7485 v0.AuxInt = 2 7486 v0.AddArg(x) 7487 v0.AddArg(x) 7488 v.AddArg(v0) 7489 return true 7490 } 7491 // match: (MADDW a x (MOVDconst [c])) 7492 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 7493 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 7494 for { 7495 _ = v.Args[2] 7496 a := v.Args[0] 7497 x := v.Args[1] 7498 v_2 := v.Args[2] 7499 if v_2.Op != OpARM64MOVDconst { 7500 break 7501 } 7502 c := v_2.AuxInt 7503 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 7504 break 7505 } 7506 v.reset(OpARM64SUBshiftLL) 7507 v.AuxInt = log2(c / 7) 7508 v.AddArg(a) 7509 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7510 v0.AuxInt = 3 7511 v0.AddArg(x) 7512 v0.AddArg(x) 7513 v.AddArg(v0) 7514 return true 7515 } 7516 // match: (MADDW a x (MOVDconst [c])) 7517 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 7518 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 7519 for { 7520 _ = v.Args[2] 7521 a := v.Args[0] 7522 x := v.Args[1] 7523 v_2 := v.Args[2] 7524 if v_2.Op != OpARM64MOVDconst { 7525 break 7526 } 7527 c := v_2.AuxInt 7528 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 7529 break 7530 } 7531 v.reset(OpARM64ADDshiftLL) 7532 v.AuxInt = log2(c / 9) 7533 v.AddArg(a) 7534 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7535 v0.AuxInt = 3 7536 v0.AddArg(x) 7537 v0.AddArg(x) 7538 v.AddArg(v0) 7539 return true 7540 } 7541 return false 7542 } 7543 func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool { 7544 b := v.Block 7545 _ = b 7546 // match: (MADDW a (MOVDconst [c]) x) 7547 // cond: int32(c)==-1 7548 // result: (SUB a x) 7549 for { 7550 _ = v.Args[2] 7551 a := v.Args[0] 7552 v_1 := v.Args[1] 7553 if v_1.Op != OpARM64MOVDconst { 7554 break 7555 } 7556 c := v_1.AuxInt 7557 x := v.Args[2] 7558 if !(int32(c) == -1) { 7559 break 
7560 } 7561 v.reset(OpARM64SUB) 7562 v.AddArg(a) 7563 v.AddArg(x) 7564 return true 7565 } 7566 // match: (MADDW a (MOVDconst [c]) _) 7567 // cond: int32(c)==0 7568 // result: a 7569 for { 7570 _ = v.Args[2] 7571 a := v.Args[0] 7572 v_1 := v.Args[1] 7573 if v_1.Op != OpARM64MOVDconst { 7574 break 7575 } 7576 c := v_1.AuxInt 7577 if !(int32(c) == 0) { 7578 break 7579 } 7580 v.reset(OpCopy) 7581 v.Type = a.Type 7582 v.AddArg(a) 7583 return true 7584 } 7585 // match: (MADDW a (MOVDconst [c]) x) 7586 // cond: int32(c)==1 7587 // result: (ADD a x) 7588 for { 7589 _ = v.Args[2] 7590 a := v.Args[0] 7591 v_1 := v.Args[1] 7592 if v_1.Op != OpARM64MOVDconst { 7593 break 7594 } 7595 c := v_1.AuxInt 7596 x := v.Args[2] 7597 if !(int32(c) == 1) { 7598 break 7599 } 7600 v.reset(OpARM64ADD) 7601 v.AddArg(a) 7602 v.AddArg(x) 7603 return true 7604 } 7605 // match: (MADDW a (MOVDconst [c]) x) 7606 // cond: isPowerOfTwo(c) 7607 // result: (ADDshiftLL a x [log2(c)]) 7608 for { 7609 _ = v.Args[2] 7610 a := v.Args[0] 7611 v_1 := v.Args[1] 7612 if v_1.Op != OpARM64MOVDconst { 7613 break 7614 } 7615 c := v_1.AuxInt 7616 x := v.Args[2] 7617 if !(isPowerOfTwo(c)) { 7618 break 7619 } 7620 v.reset(OpARM64ADDshiftLL) 7621 v.AuxInt = log2(c) 7622 v.AddArg(a) 7623 v.AddArg(x) 7624 return true 7625 } 7626 // match: (MADDW a (MOVDconst [c]) x) 7627 // cond: isPowerOfTwo(c-1) && int32(c)>=3 7628 // result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)])) 7629 for { 7630 _ = v.Args[2] 7631 a := v.Args[0] 7632 v_1 := v.Args[1] 7633 if v_1.Op != OpARM64MOVDconst { 7634 break 7635 } 7636 c := v_1.AuxInt 7637 x := v.Args[2] 7638 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 7639 break 7640 } 7641 v.reset(OpARM64ADD) 7642 v.AddArg(a) 7643 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7644 v0.AuxInt = log2(c - 1) 7645 v0.AddArg(x) 7646 v0.AddArg(x) 7647 v.AddArg(v0) 7648 return true 7649 } 7650 // match: (MADDW a (MOVDconst [c]) x) 7651 // cond: isPowerOfTwo(c+1) && int32(c)>=7 7652 // result: (SUB a 
(SUBshiftLL <x.Type> x x [log2(c+1)])) 7653 for { 7654 _ = v.Args[2] 7655 a := v.Args[0] 7656 v_1 := v.Args[1] 7657 if v_1.Op != OpARM64MOVDconst { 7658 break 7659 } 7660 c := v_1.AuxInt 7661 x := v.Args[2] 7662 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 7663 break 7664 } 7665 v.reset(OpARM64SUB) 7666 v.AddArg(a) 7667 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7668 v0.AuxInt = log2(c + 1) 7669 v0.AddArg(x) 7670 v0.AddArg(x) 7671 v.AddArg(v0) 7672 return true 7673 } 7674 // match: (MADDW a (MOVDconst [c]) x) 7675 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 7676 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 7677 for { 7678 _ = v.Args[2] 7679 a := v.Args[0] 7680 v_1 := v.Args[1] 7681 if v_1.Op != OpARM64MOVDconst { 7682 break 7683 } 7684 c := v_1.AuxInt 7685 x := v.Args[2] 7686 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 7687 break 7688 } 7689 v.reset(OpARM64SUBshiftLL) 7690 v.AuxInt = log2(c / 3) 7691 v.AddArg(a) 7692 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7693 v0.AuxInt = 2 7694 v0.AddArg(x) 7695 v0.AddArg(x) 7696 v.AddArg(v0) 7697 return true 7698 } 7699 // match: (MADDW a (MOVDconst [c]) x) 7700 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 7701 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 7702 for { 7703 _ = v.Args[2] 7704 a := v.Args[0] 7705 v_1 := v.Args[1] 7706 if v_1.Op != OpARM64MOVDconst { 7707 break 7708 } 7709 c := v_1.AuxInt 7710 x := v.Args[2] 7711 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 7712 break 7713 } 7714 v.reset(OpARM64ADDshiftLL) 7715 v.AuxInt = log2(c / 5) 7716 v.AddArg(a) 7717 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7718 v0.AuxInt = 2 7719 v0.AddArg(x) 7720 v0.AddArg(x) 7721 v.AddArg(v0) 7722 return true 7723 } 7724 // match: (MADDW a (MOVDconst [c]) x) 7725 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 7726 // result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 7727 for { 7728 _ = v.Args[2] 7729 a := v.Args[0] 
7730 v_1 := v.Args[1] 7731 if v_1.Op != OpARM64MOVDconst { 7732 break 7733 } 7734 c := v_1.AuxInt 7735 x := v.Args[2] 7736 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 7737 break 7738 } 7739 v.reset(OpARM64SUBshiftLL) 7740 v.AuxInt = log2(c / 7) 7741 v.AddArg(a) 7742 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 7743 v0.AuxInt = 3 7744 v0.AddArg(x) 7745 v0.AddArg(x) 7746 v.AddArg(v0) 7747 return true 7748 } 7749 // match: (MADDW a (MOVDconst [c]) x) 7750 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 7751 // result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 7752 for { 7753 _ = v.Args[2] 7754 a := v.Args[0] 7755 v_1 := v.Args[1] 7756 if v_1.Op != OpARM64MOVDconst { 7757 break 7758 } 7759 c := v_1.AuxInt 7760 x := v.Args[2] 7761 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 7762 break 7763 } 7764 v.reset(OpARM64ADDshiftLL) 7765 v.AuxInt = log2(c / 9) 7766 v.AddArg(a) 7767 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7768 v0.AuxInt = 3 7769 v0.AddArg(x) 7770 v0.AddArg(x) 7771 v.AddArg(v0) 7772 return true 7773 } 7774 return false 7775 } 7776 func rewriteValueARM64_OpARM64MADDW_20(v *Value) bool { 7777 b := v.Block 7778 _ = b 7779 // match: (MADDW (MOVDconst [c]) x y) 7780 // cond: 7781 // result: (ADDconst [c] (MULW <x.Type> x y)) 7782 for { 7783 _ = v.Args[2] 7784 v_0 := v.Args[0] 7785 if v_0.Op != OpARM64MOVDconst { 7786 break 7787 } 7788 c := v_0.AuxInt 7789 x := v.Args[1] 7790 y := v.Args[2] 7791 v.reset(OpARM64ADDconst) 7792 v.AuxInt = c 7793 v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 7794 v0.AddArg(x) 7795 v0.AddArg(y) 7796 v.AddArg(v0) 7797 return true 7798 } 7799 // match: (MADDW a (MOVDconst [c]) (MOVDconst [d])) 7800 // cond: 7801 // result: (ADDconst [int64(int32(c)*int32(d))] a) 7802 for { 7803 _ = v.Args[2] 7804 a := v.Args[0] 7805 v_1 := v.Args[1] 7806 if v_1.Op != OpARM64MOVDconst { 7807 break 7808 } 7809 c := v_1.AuxInt 7810 v_2 := v.Args[2] 7811 if v_2.Op != OpARM64MOVDconst { 7812 break 7813 } 7814 d 
:= v_2.AuxInt 7815 v.reset(OpARM64ADDconst) 7816 v.AuxInt = int64(int32(c) * int32(d)) 7817 v.AddArg(a) 7818 return true 7819 } 7820 return false 7821 } 7822 func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool { 7823 b := v.Block 7824 _ = b 7825 // match: (MNEG x (MOVDconst [-1])) 7826 // cond: 7827 // result: x 7828 for { 7829 _ = v.Args[1] 7830 x := v.Args[0] 7831 v_1 := v.Args[1] 7832 if v_1.Op != OpARM64MOVDconst { 7833 break 7834 } 7835 if v_1.AuxInt != -1 { 7836 break 7837 } 7838 v.reset(OpCopy) 7839 v.Type = x.Type 7840 v.AddArg(x) 7841 return true 7842 } 7843 // match: (MNEG (MOVDconst [-1]) x) 7844 // cond: 7845 // result: x 7846 for { 7847 _ = v.Args[1] 7848 v_0 := v.Args[0] 7849 if v_0.Op != OpARM64MOVDconst { 7850 break 7851 } 7852 if v_0.AuxInt != -1 { 7853 break 7854 } 7855 x := v.Args[1] 7856 v.reset(OpCopy) 7857 v.Type = x.Type 7858 v.AddArg(x) 7859 return true 7860 } 7861 // match: (MNEG _ (MOVDconst [0])) 7862 // cond: 7863 // result: (MOVDconst [0]) 7864 for { 7865 _ = v.Args[1] 7866 v_1 := v.Args[1] 7867 if v_1.Op != OpARM64MOVDconst { 7868 break 7869 } 7870 if v_1.AuxInt != 0 { 7871 break 7872 } 7873 v.reset(OpARM64MOVDconst) 7874 v.AuxInt = 0 7875 return true 7876 } 7877 // match: (MNEG (MOVDconst [0]) _) 7878 // cond: 7879 // result: (MOVDconst [0]) 7880 for { 7881 _ = v.Args[1] 7882 v_0 := v.Args[0] 7883 if v_0.Op != OpARM64MOVDconst { 7884 break 7885 } 7886 if v_0.AuxInt != 0 { 7887 break 7888 } 7889 v.reset(OpARM64MOVDconst) 7890 v.AuxInt = 0 7891 return true 7892 } 7893 // match: (MNEG x (MOVDconst [1])) 7894 // cond: 7895 // result: (NEG x) 7896 for { 7897 _ = v.Args[1] 7898 x := v.Args[0] 7899 v_1 := v.Args[1] 7900 if v_1.Op != OpARM64MOVDconst { 7901 break 7902 } 7903 if v_1.AuxInt != 1 { 7904 break 7905 } 7906 v.reset(OpARM64NEG) 7907 v.AddArg(x) 7908 return true 7909 } 7910 // match: (MNEG (MOVDconst [1]) x) 7911 // cond: 7912 // result: (NEG x) 7913 for { 7914 _ = v.Args[1] 7915 v_0 := v.Args[0] 7916 if v_0.Op != OpARM64MOVDconst { 
7917 break 7918 } 7919 if v_0.AuxInt != 1 { 7920 break 7921 } 7922 x := v.Args[1] 7923 v.reset(OpARM64NEG) 7924 v.AddArg(x) 7925 return true 7926 } 7927 // match: (MNEG x (MOVDconst [c])) 7928 // cond: isPowerOfTwo(c) 7929 // result: (NEG (SLLconst <x.Type> [log2(c)] x)) 7930 for { 7931 _ = v.Args[1] 7932 x := v.Args[0] 7933 v_1 := v.Args[1] 7934 if v_1.Op != OpARM64MOVDconst { 7935 break 7936 } 7937 c := v_1.AuxInt 7938 if !(isPowerOfTwo(c)) { 7939 break 7940 } 7941 v.reset(OpARM64NEG) 7942 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7943 v0.AuxInt = log2(c) 7944 v0.AddArg(x) 7945 v.AddArg(v0) 7946 return true 7947 } 7948 // match: (MNEG (MOVDconst [c]) x) 7949 // cond: isPowerOfTwo(c) 7950 // result: (NEG (SLLconst <x.Type> [log2(c)] x)) 7951 for { 7952 _ = v.Args[1] 7953 v_0 := v.Args[0] 7954 if v_0.Op != OpARM64MOVDconst { 7955 break 7956 } 7957 c := v_0.AuxInt 7958 x := v.Args[1] 7959 if !(isPowerOfTwo(c)) { 7960 break 7961 } 7962 v.reset(OpARM64NEG) 7963 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 7964 v0.AuxInt = log2(c) 7965 v0.AddArg(x) 7966 v.AddArg(v0) 7967 return true 7968 } 7969 // match: (MNEG x (MOVDconst [c])) 7970 // cond: isPowerOfTwo(c-1) && c >= 3 7971 // result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)])) 7972 for { 7973 _ = v.Args[1] 7974 x := v.Args[0] 7975 v_1 := v.Args[1] 7976 if v_1.Op != OpARM64MOVDconst { 7977 break 7978 } 7979 c := v_1.AuxInt 7980 if !(isPowerOfTwo(c-1) && c >= 3) { 7981 break 7982 } 7983 v.reset(OpARM64NEG) 7984 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 7985 v0.AuxInt = log2(c - 1) 7986 v0.AddArg(x) 7987 v0.AddArg(x) 7988 v.AddArg(v0) 7989 return true 7990 } 7991 // match: (MNEG (MOVDconst [c]) x) 7992 // cond: isPowerOfTwo(c-1) && c >= 3 7993 // result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)])) 7994 for { 7995 _ = v.Args[1] 7996 v_0 := v.Args[0] 7997 if v_0.Op != OpARM64MOVDconst { 7998 break 7999 } 8000 c := v_0.AuxInt 8001 x := v.Args[1] 8002 if !(isPowerOfTwo(c-1) && c >= 3) { 8003 break 
8004 } 8005 v.reset(OpARM64NEG) 8006 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8007 v0.AuxInt = log2(c - 1) 8008 v0.AddArg(x) 8009 v0.AddArg(x) 8010 v.AddArg(v0) 8011 return true 8012 } 8013 return false 8014 } 8015 func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool { 8016 b := v.Block 8017 _ = b 8018 // match: (MNEG x (MOVDconst [c])) 8019 // cond: isPowerOfTwo(c+1) && c >= 7 8020 // result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)])) 8021 for { 8022 _ = v.Args[1] 8023 x := v.Args[0] 8024 v_1 := v.Args[1] 8025 if v_1.Op != OpARM64MOVDconst { 8026 break 8027 } 8028 c := v_1.AuxInt 8029 if !(isPowerOfTwo(c+1) && c >= 7) { 8030 break 8031 } 8032 v.reset(OpARM64NEG) 8033 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8034 v0.AuxInt = log2(c + 1) 8035 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8036 v1.AddArg(x) 8037 v0.AddArg(v1) 8038 v0.AddArg(x) 8039 v.AddArg(v0) 8040 return true 8041 } 8042 // match: (MNEG (MOVDconst [c]) x) 8043 // cond: isPowerOfTwo(c+1) && c >= 7 8044 // result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)])) 8045 for { 8046 _ = v.Args[1] 8047 v_0 := v.Args[0] 8048 if v_0.Op != OpARM64MOVDconst { 8049 break 8050 } 8051 c := v_0.AuxInt 8052 x := v.Args[1] 8053 if !(isPowerOfTwo(c+1) && c >= 7) { 8054 break 8055 } 8056 v.reset(OpARM64NEG) 8057 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8058 v0.AuxInt = log2(c + 1) 8059 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8060 v1.AddArg(x) 8061 v0.AddArg(v1) 8062 v0.AddArg(x) 8063 v.AddArg(v0) 8064 return true 8065 } 8066 // match: (MNEG x (MOVDconst [c])) 8067 // cond: c%3 == 0 && isPowerOfTwo(c/3) 8068 // result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2])) 8069 for { 8070 _ = v.Args[1] 8071 x := v.Args[0] 8072 v_1 := v.Args[1] 8073 if v_1.Op != OpARM64MOVDconst { 8074 break 8075 } 8076 c := v_1.AuxInt 8077 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 8078 break 8079 } 8080 v.reset(OpARM64SLLconst) 8081 v.Type = x.Type 8082 v.AuxInt = log2(c 
/ 3) 8083 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8084 v0.AuxInt = 2 8085 v0.AddArg(x) 8086 v0.AddArg(x) 8087 v.AddArg(v0) 8088 return true 8089 } 8090 // match: (MNEG (MOVDconst [c]) x) 8091 // cond: c%3 == 0 && isPowerOfTwo(c/3) 8092 // result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2])) 8093 for { 8094 _ = v.Args[1] 8095 v_0 := v.Args[0] 8096 if v_0.Op != OpARM64MOVDconst { 8097 break 8098 } 8099 c := v_0.AuxInt 8100 x := v.Args[1] 8101 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 8102 break 8103 } 8104 v.reset(OpARM64SLLconst) 8105 v.Type = x.Type 8106 v.AuxInt = log2(c / 3) 8107 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8108 v0.AuxInt = 2 8109 v0.AddArg(x) 8110 v0.AddArg(x) 8111 v.AddArg(v0) 8112 return true 8113 } 8114 // match: (MNEG x (MOVDconst [c])) 8115 // cond: c%5 == 0 && isPowerOfTwo(c/5) 8116 // result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))) 8117 for { 8118 _ = v.Args[1] 8119 x := v.Args[0] 8120 v_1 := v.Args[1] 8121 if v_1.Op != OpARM64MOVDconst { 8122 break 8123 } 8124 c := v_1.AuxInt 8125 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 8126 break 8127 } 8128 v.reset(OpARM64NEG) 8129 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8130 v0.AuxInt = log2(c / 5) 8131 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8132 v1.AuxInt = 2 8133 v1.AddArg(x) 8134 v1.AddArg(x) 8135 v0.AddArg(v1) 8136 v.AddArg(v0) 8137 return true 8138 } 8139 // match: (MNEG (MOVDconst [c]) x) 8140 // cond: c%5 == 0 && isPowerOfTwo(c/5) 8141 // result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))) 8142 for { 8143 _ = v.Args[1] 8144 v_0 := v.Args[0] 8145 if v_0.Op != OpARM64MOVDconst { 8146 break 8147 } 8148 c := v_0.AuxInt 8149 x := v.Args[1] 8150 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 8151 break 8152 } 8153 v.reset(OpARM64NEG) 8154 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8155 v0.AuxInt = log2(c / 5) 8156 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8157 v1.AuxInt = 2 8158 
v1.AddArg(x) 8159 v1.AddArg(x) 8160 v0.AddArg(v1) 8161 v.AddArg(v0) 8162 return true 8163 } 8164 // match: (MNEG x (MOVDconst [c])) 8165 // cond: c%7 == 0 && isPowerOfTwo(c/7) 8166 // result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3])) 8167 for { 8168 _ = v.Args[1] 8169 x := v.Args[0] 8170 v_1 := v.Args[1] 8171 if v_1.Op != OpARM64MOVDconst { 8172 break 8173 } 8174 c := v_1.AuxInt 8175 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 8176 break 8177 } 8178 v.reset(OpARM64SLLconst) 8179 v.Type = x.Type 8180 v.AuxInt = log2(c / 7) 8181 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8182 v0.AuxInt = 3 8183 v0.AddArg(x) 8184 v0.AddArg(x) 8185 v.AddArg(v0) 8186 return true 8187 } 8188 // match: (MNEG (MOVDconst [c]) x) 8189 // cond: c%7 == 0 && isPowerOfTwo(c/7) 8190 // result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3])) 8191 for { 8192 _ = v.Args[1] 8193 v_0 := v.Args[0] 8194 if v_0.Op != OpARM64MOVDconst { 8195 break 8196 } 8197 c := v_0.AuxInt 8198 x := v.Args[1] 8199 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 8200 break 8201 } 8202 v.reset(OpARM64SLLconst) 8203 v.Type = x.Type 8204 v.AuxInt = log2(c / 7) 8205 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8206 v0.AuxInt = 3 8207 v0.AddArg(x) 8208 v0.AddArg(x) 8209 v.AddArg(v0) 8210 return true 8211 } 8212 // match: (MNEG x (MOVDconst [c])) 8213 // cond: c%9 == 0 && isPowerOfTwo(c/9) 8214 // result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))) 8215 for { 8216 _ = v.Args[1] 8217 x := v.Args[0] 8218 v_1 := v.Args[1] 8219 if v_1.Op != OpARM64MOVDconst { 8220 break 8221 } 8222 c := v_1.AuxInt 8223 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 8224 break 8225 } 8226 v.reset(OpARM64NEG) 8227 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8228 v0.AuxInt = log2(c / 9) 8229 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8230 v1.AuxInt = 3 8231 v1.AddArg(x) 8232 v1.AddArg(x) 8233 v0.AddArg(v1) 8234 v.AddArg(v0) 8235 return true 8236 } 8237 // match: (MNEG (MOVDconst [c]) x) 
8238 // cond: c%9 == 0 && isPowerOfTwo(c/9) 8239 // result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))) 8240 for { 8241 _ = v.Args[1] 8242 v_0 := v.Args[0] 8243 if v_0.Op != OpARM64MOVDconst { 8244 break 8245 } 8246 c := v_0.AuxInt 8247 x := v.Args[1] 8248 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 8249 break 8250 } 8251 v.reset(OpARM64NEG) 8252 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8253 v0.AuxInt = log2(c / 9) 8254 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8255 v1.AuxInt = 3 8256 v1.AddArg(x) 8257 v1.AddArg(x) 8258 v0.AddArg(v1) 8259 v.AddArg(v0) 8260 return true 8261 } 8262 return false 8263 } 8264 func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool { 8265 // match: (MNEG (MOVDconst [c]) (MOVDconst [d])) 8266 // cond: 8267 // result: (MOVDconst [-c*d]) 8268 for { 8269 _ = v.Args[1] 8270 v_0 := v.Args[0] 8271 if v_0.Op != OpARM64MOVDconst { 8272 break 8273 } 8274 c := v_0.AuxInt 8275 v_1 := v.Args[1] 8276 if v_1.Op != OpARM64MOVDconst { 8277 break 8278 } 8279 d := v_1.AuxInt 8280 v.reset(OpARM64MOVDconst) 8281 v.AuxInt = -c * d 8282 return true 8283 } 8284 // match: (MNEG (MOVDconst [d]) (MOVDconst [c])) 8285 // cond: 8286 // result: (MOVDconst [-c*d]) 8287 for { 8288 _ = v.Args[1] 8289 v_0 := v.Args[0] 8290 if v_0.Op != OpARM64MOVDconst { 8291 break 8292 } 8293 d := v_0.AuxInt 8294 v_1 := v.Args[1] 8295 if v_1.Op != OpARM64MOVDconst { 8296 break 8297 } 8298 c := v_1.AuxInt 8299 v.reset(OpARM64MOVDconst) 8300 v.AuxInt = -c * d 8301 return true 8302 } 8303 return false 8304 } 8305 func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool { 8306 b := v.Block 8307 _ = b 8308 // match: (MNEGW x (MOVDconst [c])) 8309 // cond: int32(c)==-1 8310 // result: x 8311 for { 8312 _ = v.Args[1] 8313 x := v.Args[0] 8314 v_1 := v.Args[1] 8315 if v_1.Op != OpARM64MOVDconst { 8316 break 8317 } 8318 c := v_1.AuxInt 8319 if !(int32(c) == -1) { 8320 break 8321 } 8322 v.reset(OpCopy) 8323 v.Type = x.Type 8324 v.AddArg(x) 8325 return true 8326 } 8327 
	// NOTE(review): generated code — interior rules of
	// rewriteValueARM64_OpARM64MNEGW_0 (continued from the previous span).
	// Edit gen/ARM64.rules and regenerate rather than changing this file.
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		// Mirror of the previous rule for the constant in arg 0:
		// -(-1 * x) == x.
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		// Multiplying by 0 (as int32) yields 0 regardless of the other arg.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		// -(x * 1) == -x: drop the multiply, keep the negate.
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	// Strength reduction: -(x * 2^k) ==> NEG(x << k).
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
8426 c := v_1.AuxInt 8427 if !(isPowerOfTwo(c)) { 8428 break 8429 } 8430 v.reset(OpARM64NEG) 8431 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8432 v0.AuxInt = log2(c) 8433 v0.AddArg(x) 8434 v.AddArg(v0) 8435 return true 8436 } 8437 // match: (MNEGW (MOVDconst [c]) x) 8438 // cond: isPowerOfTwo(c) 8439 // result: (NEG (SLLconst <x.Type> [log2(c)] x)) 8440 for { 8441 _ = v.Args[1] 8442 v_0 := v.Args[0] 8443 if v_0.Op != OpARM64MOVDconst { 8444 break 8445 } 8446 c := v_0.AuxInt 8447 x := v.Args[1] 8448 if !(isPowerOfTwo(c)) { 8449 break 8450 } 8451 v.reset(OpARM64NEG) 8452 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8453 v0.AuxInt = log2(c) 8454 v0.AddArg(x) 8455 v.AddArg(v0) 8456 return true 8457 } 8458 // match: (MNEGW x (MOVDconst [c])) 8459 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 8460 // result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)])) 8461 for { 8462 _ = v.Args[1] 8463 x := v.Args[0] 8464 v_1 := v.Args[1] 8465 if v_1.Op != OpARM64MOVDconst { 8466 break 8467 } 8468 c := v_1.AuxInt 8469 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 8470 break 8471 } 8472 v.reset(OpARM64NEG) 8473 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8474 v0.AuxInt = log2(c - 1) 8475 v0.AddArg(x) 8476 v0.AddArg(x) 8477 v.AddArg(v0) 8478 return true 8479 } 8480 // match: (MNEGW (MOVDconst [c]) x) 8481 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 8482 // result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)])) 8483 for { 8484 _ = v.Args[1] 8485 v_0 := v.Args[0] 8486 if v_0.Op != OpARM64MOVDconst { 8487 break 8488 } 8489 c := v_0.AuxInt 8490 x := v.Args[1] 8491 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 8492 break 8493 } 8494 v.reset(OpARM64NEG) 8495 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8496 v0.AuxInt = log2(c - 1) 8497 v0.AddArg(x) 8498 v0.AddArg(x) 8499 v.AddArg(v0) 8500 return true 8501 } 8502 return false 8503 } 8504 func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool { 8505 b := v.Block 8506 _ = b 8507 // match: (MNEGW x (MOVDconst [c])) 8508 // cond: 
isPowerOfTwo(c+1) && int32(c) >= 7 8509 // result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)])) 8510 for { 8511 _ = v.Args[1] 8512 x := v.Args[0] 8513 v_1 := v.Args[1] 8514 if v_1.Op != OpARM64MOVDconst { 8515 break 8516 } 8517 c := v_1.AuxInt 8518 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 8519 break 8520 } 8521 v.reset(OpARM64NEG) 8522 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8523 v0.AuxInt = log2(c + 1) 8524 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8525 v1.AddArg(x) 8526 v0.AddArg(v1) 8527 v0.AddArg(x) 8528 v.AddArg(v0) 8529 return true 8530 } 8531 // match: (MNEGW (MOVDconst [c]) x) 8532 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 8533 // result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)])) 8534 for { 8535 _ = v.Args[1] 8536 v_0 := v.Args[0] 8537 if v_0.Op != OpARM64MOVDconst { 8538 break 8539 } 8540 c := v_0.AuxInt 8541 x := v.Args[1] 8542 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 8543 break 8544 } 8545 v.reset(OpARM64NEG) 8546 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8547 v0.AuxInt = log2(c + 1) 8548 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 8549 v1.AddArg(x) 8550 v0.AddArg(v1) 8551 v0.AddArg(x) 8552 v.AddArg(v0) 8553 return true 8554 } 8555 // match: (MNEGW x (MOVDconst [c])) 8556 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 8557 // result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2])) 8558 for { 8559 _ = v.Args[1] 8560 x := v.Args[0] 8561 v_1 := v.Args[1] 8562 if v_1.Op != OpARM64MOVDconst { 8563 break 8564 } 8565 c := v_1.AuxInt 8566 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 8567 break 8568 } 8569 v.reset(OpARM64SLLconst) 8570 v.Type = x.Type 8571 v.AuxInt = log2(c / 3) 8572 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8573 v0.AuxInt = 2 8574 v0.AddArg(x) 8575 v0.AddArg(x) 8576 v.AddArg(v0) 8577 return true 8578 } 8579 // match: (MNEGW (MOVDconst [c]) x) 8580 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 8581 // result: (SLLconst <x.Type> [log2(c/3)] 
(SUBshiftLL <x.Type> x x [2])) 8582 for { 8583 _ = v.Args[1] 8584 v_0 := v.Args[0] 8585 if v_0.Op != OpARM64MOVDconst { 8586 break 8587 } 8588 c := v_0.AuxInt 8589 x := v.Args[1] 8590 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 8591 break 8592 } 8593 v.reset(OpARM64SLLconst) 8594 v.Type = x.Type 8595 v.AuxInt = log2(c / 3) 8596 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8597 v0.AuxInt = 2 8598 v0.AddArg(x) 8599 v0.AddArg(x) 8600 v.AddArg(v0) 8601 return true 8602 } 8603 // match: (MNEGW x (MOVDconst [c])) 8604 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 8605 // result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))) 8606 for { 8607 _ = v.Args[1] 8608 x := v.Args[0] 8609 v_1 := v.Args[1] 8610 if v_1.Op != OpARM64MOVDconst { 8611 break 8612 } 8613 c := v_1.AuxInt 8614 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 8615 break 8616 } 8617 v.reset(OpARM64NEG) 8618 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8619 v0.AuxInt = log2(c / 5) 8620 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8621 v1.AuxInt = 2 8622 v1.AddArg(x) 8623 v1.AddArg(x) 8624 v0.AddArg(v1) 8625 v.AddArg(v0) 8626 return true 8627 } 8628 // match: (MNEGW (MOVDconst [c]) x) 8629 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 8630 // result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2]))) 8631 for { 8632 _ = v.Args[1] 8633 v_0 := v.Args[0] 8634 if v_0.Op != OpARM64MOVDconst { 8635 break 8636 } 8637 c := v_0.AuxInt 8638 x := v.Args[1] 8639 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 8640 break 8641 } 8642 v.reset(OpARM64NEG) 8643 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8644 v0.AuxInt = log2(c / 5) 8645 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8646 v1.AuxInt = 2 8647 v1.AddArg(x) 8648 v1.AddArg(x) 8649 v0.AddArg(v1) 8650 v.AddArg(v0) 8651 return true 8652 } 8653 // match: (MNEGW x (MOVDconst [c])) 8654 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 8655 // result: (SLLconst <x.Type> 
[log2(c/7)] (SUBshiftLL <x.Type> x x [3])) 8656 for { 8657 _ = v.Args[1] 8658 x := v.Args[0] 8659 v_1 := v.Args[1] 8660 if v_1.Op != OpARM64MOVDconst { 8661 break 8662 } 8663 c := v_1.AuxInt 8664 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 8665 break 8666 } 8667 v.reset(OpARM64SLLconst) 8668 v.Type = x.Type 8669 v.AuxInt = log2(c / 7) 8670 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8671 v0.AuxInt = 3 8672 v0.AddArg(x) 8673 v0.AddArg(x) 8674 v.AddArg(v0) 8675 return true 8676 } 8677 // match: (MNEGW (MOVDconst [c]) x) 8678 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 8679 // result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3])) 8680 for { 8681 _ = v.Args[1] 8682 v_0 := v.Args[0] 8683 if v_0.Op != OpARM64MOVDconst { 8684 break 8685 } 8686 c := v_0.AuxInt 8687 x := v.Args[1] 8688 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 8689 break 8690 } 8691 v.reset(OpARM64SLLconst) 8692 v.Type = x.Type 8693 v.AuxInt = log2(c / 7) 8694 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 8695 v0.AuxInt = 3 8696 v0.AddArg(x) 8697 v0.AddArg(x) 8698 v.AddArg(v0) 8699 return true 8700 } 8701 // match: (MNEGW x (MOVDconst [c])) 8702 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 8703 // result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))) 8704 for { 8705 _ = v.Args[1] 8706 x := v.Args[0] 8707 v_1 := v.Args[1] 8708 if v_1.Op != OpARM64MOVDconst { 8709 break 8710 } 8711 c := v_1.AuxInt 8712 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 8713 break 8714 } 8715 v.reset(OpARM64NEG) 8716 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8717 v0.AuxInt = log2(c / 9) 8718 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8719 v1.AuxInt = 3 8720 v1.AddArg(x) 8721 v1.AddArg(x) 8722 v0.AddArg(v1) 8723 v.AddArg(v0) 8724 return true 8725 } 8726 // match: (MNEGW (MOVDconst [c]) x) 8727 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 8728 // result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x 
[3]))) 8729 for { 8730 _ = v.Args[1] 8731 v_0 := v.Args[0] 8732 if v_0.Op != OpARM64MOVDconst { 8733 break 8734 } 8735 c := v_0.AuxInt 8736 x := v.Args[1] 8737 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 8738 break 8739 } 8740 v.reset(OpARM64NEG) 8741 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 8742 v0.AuxInt = log2(c / 9) 8743 v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 8744 v1.AuxInt = 3 8745 v1.AddArg(x) 8746 v1.AddArg(x) 8747 v0.AddArg(v1) 8748 v.AddArg(v0) 8749 return true 8750 } 8751 return false 8752 } 8753 func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool { 8754 // match: (MNEGW (MOVDconst [c]) (MOVDconst [d])) 8755 // cond: 8756 // result: (MOVDconst [-int64(int32(c)*int32(d))]) 8757 for { 8758 _ = v.Args[1] 8759 v_0 := v.Args[0] 8760 if v_0.Op != OpARM64MOVDconst { 8761 break 8762 } 8763 c := v_0.AuxInt 8764 v_1 := v.Args[1] 8765 if v_1.Op != OpARM64MOVDconst { 8766 break 8767 } 8768 d := v_1.AuxInt 8769 v.reset(OpARM64MOVDconst) 8770 v.AuxInt = -int64(int32(c) * int32(d)) 8771 return true 8772 } 8773 // match: (MNEGW (MOVDconst [d]) (MOVDconst [c])) 8774 // cond: 8775 // result: (MOVDconst [-int64(int32(c)*int32(d))]) 8776 for { 8777 _ = v.Args[1] 8778 v_0 := v.Args[0] 8779 if v_0.Op != OpARM64MOVDconst { 8780 break 8781 } 8782 d := v_0.AuxInt 8783 v_1 := v.Args[1] 8784 if v_1.Op != OpARM64MOVDconst { 8785 break 8786 } 8787 c := v_1.AuxInt 8788 v.reset(OpARM64MOVDconst) 8789 v.AuxInt = -int64(int32(c) * int32(d)) 8790 return true 8791 } 8792 return false 8793 } 8794 func rewriteValueARM64_OpARM64MOD_0(v *Value) bool { 8795 // match: (MOD (MOVDconst [c]) (MOVDconst [d])) 8796 // cond: 8797 // result: (MOVDconst [c%d]) 8798 for { 8799 _ = v.Args[1] 8800 v_0 := v.Args[0] 8801 if v_0.Op != OpARM64MOVDconst { 8802 break 8803 } 8804 c := v_0.AuxInt 8805 v_1 := v.Args[1] 8806 if v_1.Op != OpARM64MOVDconst { 8807 break 8808 } 8809 d := v_1.AuxInt 8810 v.reset(OpARM64MOVDconst) 8811 v.AuxInt = c % d 8812 return true 8813 } 8814 return 
false 8815 } 8816 func rewriteValueARM64_OpARM64MODW_0(v *Value) bool { 8817 // match: (MODW (MOVDconst [c]) (MOVDconst [d])) 8818 // cond: 8819 // result: (MOVDconst [int64(int32(c)%int32(d))]) 8820 for { 8821 _ = v.Args[1] 8822 v_0 := v.Args[0] 8823 if v_0.Op != OpARM64MOVDconst { 8824 break 8825 } 8826 c := v_0.AuxInt 8827 v_1 := v.Args[1] 8828 if v_1.Op != OpARM64MOVDconst { 8829 break 8830 } 8831 d := v_1.AuxInt 8832 v.reset(OpARM64MOVDconst) 8833 v.AuxInt = int64(int32(c) % int32(d)) 8834 return true 8835 } 8836 return false 8837 } 8838 func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool { 8839 b := v.Block 8840 _ = b 8841 config := b.Func.Config 8842 _ = config 8843 // match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem) 8844 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 8845 // result: (MOVBUload [off1+off2] {sym} ptr mem) 8846 for { 8847 off1 := v.AuxInt 8848 sym := v.Aux 8849 _ = v.Args[1] 8850 v_0 := v.Args[0] 8851 if v_0.Op != OpARM64ADDconst { 8852 break 8853 } 8854 off2 := v_0.AuxInt 8855 ptr := v_0.Args[0] 8856 mem := v.Args[1] 8857 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 8858 break 8859 } 8860 v.reset(OpARM64MOVBUload) 8861 v.AuxInt = off1 + off2 8862 v.Aux = sym 8863 v.AddArg(ptr) 8864 v.AddArg(mem) 8865 return true 8866 } 8867 // match: (MOVBUload [off] {sym} (ADD ptr idx) mem) 8868 // cond: off == 0 && sym == nil 8869 // result: (MOVBUloadidx ptr idx mem) 8870 for { 8871 off := v.AuxInt 8872 sym := v.Aux 8873 _ = v.Args[1] 8874 v_0 := v.Args[0] 8875 if v_0.Op != OpARM64ADD { 8876 break 8877 } 8878 _ = v_0.Args[1] 8879 ptr := v_0.Args[0] 8880 idx := v_0.Args[1] 8881 mem := v.Args[1] 8882 if !(off == 0 && sym == nil) { 8883 break 8884 } 8885 v.reset(OpARM64MOVBUloadidx) 8886 v.AddArg(ptr) 8887 v.AddArg(idx) 8888 v.AddArg(mem) 8889 return true 8890 } 8891 // match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 8892 // cond: canMergeSym(sym1,sym2) && 
is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 8893 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 8894 for { 8895 off1 := v.AuxInt 8896 sym1 := v.Aux 8897 _ = v.Args[1] 8898 v_0 := v.Args[0] 8899 if v_0.Op != OpARM64MOVDaddr { 8900 break 8901 } 8902 off2 := v_0.AuxInt 8903 sym2 := v_0.Aux 8904 ptr := v_0.Args[0] 8905 mem := v.Args[1] 8906 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 8907 break 8908 } 8909 v.reset(OpARM64MOVBUload) 8910 v.AuxInt = off1 + off2 8911 v.Aux = mergeSym(sym1, sym2) 8912 v.AddArg(ptr) 8913 v.AddArg(mem) 8914 return true 8915 } 8916 // match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 8917 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 8918 // result: (MOVDconst [0]) 8919 for { 8920 off := v.AuxInt 8921 sym := v.Aux 8922 _ = v.Args[1] 8923 ptr := v.Args[0] 8924 v_1 := v.Args[1] 8925 if v_1.Op != OpARM64MOVBstorezero { 8926 break 8927 } 8928 off2 := v_1.AuxInt 8929 sym2 := v_1.Aux 8930 _ = v_1.Args[1] 8931 ptr2 := v_1.Args[0] 8932 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 8933 break 8934 } 8935 v.reset(OpARM64MOVDconst) 8936 v.AuxInt = 0 8937 return true 8938 } 8939 // match: (MOVBUload [off] {sym} (SB) _) 8940 // cond: symIsRO(sym) 8941 // result: (MOVDconst [int64(read8(sym, off))]) 8942 for { 8943 off := v.AuxInt 8944 sym := v.Aux 8945 _ = v.Args[1] 8946 v_0 := v.Args[0] 8947 if v_0.Op != OpSB { 8948 break 8949 } 8950 if !(symIsRO(sym)) { 8951 break 8952 } 8953 v.reset(OpARM64MOVDconst) 8954 v.AuxInt = int64(read8(sym, off)) 8955 return true 8956 } 8957 return false 8958 } 8959 func rewriteValueARM64_OpARM64MOVBUloadidx_0(v *Value) bool { 8960 // match: (MOVBUloadidx ptr (MOVDconst [c]) mem) 8961 // cond: 8962 // result: (MOVBUload [c] ptr mem) 8963 for { 8964 _ = v.Args[2] 8965 ptr := v.Args[0] 8966 v_1 := v.Args[1] 8967 if v_1.Op != OpARM64MOVDconst { 8968 break 8969 } 8970 c := v_1.AuxInt 
8971 mem := v.Args[2] 8972 v.reset(OpARM64MOVBUload) 8973 v.AuxInt = c 8974 v.AddArg(ptr) 8975 v.AddArg(mem) 8976 return true 8977 } 8978 // match: (MOVBUloadidx (MOVDconst [c]) ptr mem) 8979 // cond: 8980 // result: (MOVBUload [c] ptr mem) 8981 for { 8982 _ = v.Args[2] 8983 v_0 := v.Args[0] 8984 if v_0.Op != OpARM64MOVDconst { 8985 break 8986 } 8987 c := v_0.AuxInt 8988 ptr := v.Args[1] 8989 mem := v.Args[2] 8990 v.reset(OpARM64MOVBUload) 8991 v.AuxInt = c 8992 v.AddArg(ptr) 8993 v.AddArg(mem) 8994 return true 8995 } 8996 // match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 8997 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 8998 // result: (MOVDconst [0]) 8999 for { 9000 _ = v.Args[2] 9001 ptr := v.Args[0] 9002 idx := v.Args[1] 9003 v_2 := v.Args[2] 9004 if v_2.Op != OpARM64MOVBstorezeroidx { 9005 break 9006 } 9007 _ = v_2.Args[2] 9008 ptr2 := v_2.Args[0] 9009 idx2 := v_2.Args[1] 9010 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 9011 break 9012 } 9013 v.reset(OpARM64MOVDconst) 9014 v.AuxInt = 0 9015 return true 9016 } 9017 return false 9018 } 9019 func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool { 9020 // match: (MOVBUreg x:(MOVBUload _ _)) 9021 // cond: 9022 // result: (MOVDreg x) 9023 for { 9024 x := v.Args[0] 9025 if x.Op != OpARM64MOVBUload { 9026 break 9027 } 9028 _ = x.Args[1] 9029 v.reset(OpARM64MOVDreg) 9030 v.AddArg(x) 9031 return true 9032 } 9033 // match: (MOVBUreg x:(MOVBUloadidx _ _ _)) 9034 // cond: 9035 // result: (MOVDreg x) 9036 for { 9037 x := v.Args[0] 9038 if x.Op != OpARM64MOVBUloadidx { 9039 break 9040 } 9041 _ = x.Args[2] 9042 v.reset(OpARM64MOVDreg) 9043 v.AddArg(x) 9044 return true 9045 } 9046 // match: (MOVBUreg x:(MOVBUreg _)) 9047 // cond: 9048 // result: (MOVDreg x) 9049 for { 9050 x := v.Args[0] 9051 if x.Op != OpARM64MOVBUreg { 9052 break 9053 } 9054 v.reset(OpARM64MOVDreg) 9055 v.AddArg(x) 9056 return 
true 9057 } 9058 // match: (MOVBUreg (ANDconst [c] x)) 9059 // cond: 9060 // result: (ANDconst [c&(1<<8-1)] x) 9061 for { 9062 v_0 := v.Args[0] 9063 if v_0.Op != OpARM64ANDconst { 9064 break 9065 } 9066 c := v_0.AuxInt 9067 x := v_0.Args[0] 9068 v.reset(OpARM64ANDconst) 9069 v.AuxInt = c & (1<<8 - 1) 9070 v.AddArg(x) 9071 return true 9072 } 9073 // match: (MOVBUreg (MOVDconst [c])) 9074 // cond: 9075 // result: (MOVDconst [int64(uint8(c))]) 9076 for { 9077 v_0 := v.Args[0] 9078 if v_0.Op != OpARM64MOVDconst { 9079 break 9080 } 9081 c := v_0.AuxInt 9082 v.reset(OpARM64MOVDconst) 9083 v.AuxInt = int64(uint8(c)) 9084 return true 9085 } 9086 // match: (MOVBUreg x) 9087 // cond: x.Type.IsBoolean() 9088 // result: (MOVDreg x) 9089 for { 9090 x := v.Args[0] 9091 if !(x.Type.IsBoolean()) { 9092 break 9093 } 9094 v.reset(OpARM64MOVDreg) 9095 v.AddArg(x) 9096 return true 9097 } 9098 // match: (MOVBUreg (SLLconst [sc] x)) 9099 // cond: isARM64BFMask(sc, 1<<8-1, sc) 9100 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x) 9101 for { 9102 v_0 := v.Args[0] 9103 if v_0.Op != OpARM64SLLconst { 9104 break 9105 } 9106 sc := v_0.AuxInt 9107 x := v_0.Args[0] 9108 if !(isARM64BFMask(sc, 1<<8-1, sc)) { 9109 break 9110 } 9111 v.reset(OpARM64UBFIZ) 9112 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc)) 9113 v.AddArg(x) 9114 return true 9115 } 9116 // match: (MOVBUreg (SRLconst [sc] x)) 9117 // cond: isARM64BFMask(sc, 1<<8-1, 0) 9118 // result: (UBFX [arm64BFAuxInt(sc, 8)] x) 9119 for { 9120 v_0 := v.Args[0] 9121 if v_0.Op != OpARM64SRLconst { 9122 break 9123 } 9124 sc := v_0.AuxInt 9125 x := v_0.Args[0] 9126 if !(isARM64BFMask(sc, 1<<8-1, 0)) { 9127 break 9128 } 9129 v.reset(OpARM64UBFX) 9130 v.AuxInt = arm64BFAuxInt(sc, 8) 9131 v.AddArg(x) 9132 return true 9133 } 9134 return false 9135 } 9136 func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool { 9137 b := v.Block 9138 _ = b 9139 config := b.Func.Config 9140 _ = config 9141 // match: (MOVBload [off1] {sym} 
(ADDconst [off2] ptr) mem) 9142 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9143 // result: (MOVBload [off1+off2] {sym} ptr mem) 9144 for { 9145 off1 := v.AuxInt 9146 sym := v.Aux 9147 _ = v.Args[1] 9148 v_0 := v.Args[0] 9149 if v_0.Op != OpARM64ADDconst { 9150 break 9151 } 9152 off2 := v_0.AuxInt 9153 ptr := v_0.Args[0] 9154 mem := v.Args[1] 9155 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9156 break 9157 } 9158 v.reset(OpARM64MOVBload) 9159 v.AuxInt = off1 + off2 9160 v.Aux = sym 9161 v.AddArg(ptr) 9162 v.AddArg(mem) 9163 return true 9164 } 9165 // match: (MOVBload [off] {sym} (ADD ptr idx) mem) 9166 // cond: off == 0 && sym == nil 9167 // result: (MOVBloadidx ptr idx mem) 9168 for { 9169 off := v.AuxInt 9170 sym := v.Aux 9171 _ = v.Args[1] 9172 v_0 := v.Args[0] 9173 if v_0.Op != OpARM64ADD { 9174 break 9175 } 9176 _ = v_0.Args[1] 9177 ptr := v_0.Args[0] 9178 idx := v_0.Args[1] 9179 mem := v.Args[1] 9180 if !(off == 0 && sym == nil) { 9181 break 9182 } 9183 v.reset(OpARM64MOVBloadidx) 9184 v.AddArg(ptr) 9185 v.AddArg(idx) 9186 v.AddArg(mem) 9187 return true 9188 } 9189 // match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 9190 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9191 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 9192 for { 9193 off1 := v.AuxInt 9194 sym1 := v.Aux 9195 _ = v.Args[1] 9196 v_0 := v.Args[0] 9197 if v_0.Op != OpARM64MOVDaddr { 9198 break 9199 } 9200 off2 := v_0.AuxInt 9201 sym2 := v_0.Aux 9202 ptr := v_0.Args[0] 9203 mem := v.Args[1] 9204 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9205 break 9206 } 9207 v.reset(OpARM64MOVBload) 9208 v.AuxInt = off1 + off2 9209 v.Aux = mergeSym(sym1, sym2) 9210 v.AddArg(ptr) 9211 v.AddArg(mem) 9212 return true 9213 } 9214 // match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _)) 9215 // 
cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 9216 // result: (MOVDconst [0]) 9217 for { 9218 off := v.AuxInt 9219 sym := v.Aux 9220 _ = v.Args[1] 9221 ptr := v.Args[0] 9222 v_1 := v.Args[1] 9223 if v_1.Op != OpARM64MOVBstorezero { 9224 break 9225 } 9226 off2 := v_1.AuxInt 9227 sym2 := v_1.Aux 9228 _ = v_1.Args[1] 9229 ptr2 := v_1.Args[0] 9230 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 9231 break 9232 } 9233 v.reset(OpARM64MOVDconst) 9234 v.AuxInt = 0 9235 return true 9236 } 9237 return false 9238 } 9239 func rewriteValueARM64_OpARM64MOVBloadidx_0(v *Value) bool { 9240 // match: (MOVBloadidx ptr (MOVDconst [c]) mem) 9241 // cond: 9242 // result: (MOVBload [c] ptr mem) 9243 for { 9244 _ = v.Args[2] 9245 ptr := v.Args[0] 9246 v_1 := v.Args[1] 9247 if v_1.Op != OpARM64MOVDconst { 9248 break 9249 } 9250 c := v_1.AuxInt 9251 mem := v.Args[2] 9252 v.reset(OpARM64MOVBload) 9253 v.AuxInt = c 9254 v.AddArg(ptr) 9255 v.AddArg(mem) 9256 return true 9257 } 9258 // match: (MOVBloadidx (MOVDconst [c]) ptr mem) 9259 // cond: 9260 // result: (MOVBload [c] ptr mem) 9261 for { 9262 _ = v.Args[2] 9263 v_0 := v.Args[0] 9264 if v_0.Op != OpARM64MOVDconst { 9265 break 9266 } 9267 c := v_0.AuxInt 9268 ptr := v.Args[1] 9269 mem := v.Args[2] 9270 v.reset(OpARM64MOVBload) 9271 v.AuxInt = c 9272 v.AddArg(ptr) 9273 v.AddArg(mem) 9274 return true 9275 } 9276 // match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _)) 9277 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 9278 // result: (MOVDconst [0]) 9279 for { 9280 _ = v.Args[2] 9281 ptr := v.Args[0] 9282 idx := v.Args[1] 9283 v_2 := v.Args[2] 9284 if v_2.Op != OpARM64MOVBstorezeroidx { 9285 break 9286 } 9287 _ = v_2.Args[2] 9288 ptr2 := v_2.Args[0] 9289 idx2 := v_2.Args[1] 9290 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 9291 break 9292 } 9293 v.reset(OpARM64MOVDconst) 9294 v.AuxInt = 0 9295 return 
true 9296 } 9297 return false 9298 } 9299 func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool { 9300 // match: (MOVBreg x:(MOVBload _ _)) 9301 // cond: 9302 // result: (MOVDreg x) 9303 for { 9304 x := v.Args[0] 9305 if x.Op != OpARM64MOVBload { 9306 break 9307 } 9308 _ = x.Args[1] 9309 v.reset(OpARM64MOVDreg) 9310 v.AddArg(x) 9311 return true 9312 } 9313 // match: (MOVBreg x:(MOVBloadidx _ _ _)) 9314 // cond: 9315 // result: (MOVDreg x) 9316 for { 9317 x := v.Args[0] 9318 if x.Op != OpARM64MOVBloadidx { 9319 break 9320 } 9321 _ = x.Args[2] 9322 v.reset(OpARM64MOVDreg) 9323 v.AddArg(x) 9324 return true 9325 } 9326 // match: (MOVBreg x:(MOVBreg _)) 9327 // cond: 9328 // result: (MOVDreg x) 9329 for { 9330 x := v.Args[0] 9331 if x.Op != OpARM64MOVBreg { 9332 break 9333 } 9334 v.reset(OpARM64MOVDreg) 9335 v.AddArg(x) 9336 return true 9337 } 9338 // match: (MOVBreg (MOVDconst [c])) 9339 // cond: 9340 // result: (MOVDconst [int64(int8(c))]) 9341 for { 9342 v_0 := v.Args[0] 9343 if v_0.Op != OpARM64MOVDconst { 9344 break 9345 } 9346 c := v_0.AuxInt 9347 v.reset(OpARM64MOVDconst) 9348 v.AuxInt = int64(int8(c)) 9349 return true 9350 } 9351 // match: (MOVBreg (SLLconst [lc] x)) 9352 // cond: lc < 8 9353 // result: (SBFIZ [arm64BFAuxInt(lc, 8-lc)] x) 9354 for { 9355 v_0 := v.Args[0] 9356 if v_0.Op != OpARM64SLLconst { 9357 break 9358 } 9359 lc := v_0.AuxInt 9360 x := v_0.Args[0] 9361 if !(lc < 8) { 9362 break 9363 } 9364 v.reset(OpARM64SBFIZ) 9365 v.AuxInt = arm64BFAuxInt(lc, 8-lc) 9366 v.AddArg(x) 9367 return true 9368 } 9369 return false 9370 } 9371 func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool { 9372 b := v.Block 9373 _ = b 9374 config := b.Func.Config 9375 _ = config 9376 // match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem) 9377 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9378 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 9379 for { 9380 off1 := v.AuxInt 9381 sym := v.Aux 9382 _ = v.Args[2] 9383 v_0 := 
v.Args[0] 9384 if v_0.Op != OpARM64ADDconst { 9385 break 9386 } 9387 off2 := v_0.AuxInt 9388 ptr := v_0.Args[0] 9389 val := v.Args[1] 9390 mem := v.Args[2] 9391 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9392 break 9393 } 9394 v.reset(OpARM64MOVBstore) 9395 v.AuxInt = off1 + off2 9396 v.Aux = sym 9397 v.AddArg(ptr) 9398 v.AddArg(val) 9399 v.AddArg(mem) 9400 return true 9401 } 9402 // match: (MOVBstore [off] {sym} (ADD ptr idx) val mem) 9403 // cond: off == 0 && sym == nil 9404 // result: (MOVBstoreidx ptr idx val mem) 9405 for { 9406 off := v.AuxInt 9407 sym := v.Aux 9408 _ = v.Args[2] 9409 v_0 := v.Args[0] 9410 if v_0.Op != OpARM64ADD { 9411 break 9412 } 9413 _ = v_0.Args[1] 9414 ptr := v_0.Args[0] 9415 idx := v_0.Args[1] 9416 val := v.Args[1] 9417 mem := v.Args[2] 9418 if !(off == 0 && sym == nil) { 9419 break 9420 } 9421 v.reset(OpARM64MOVBstoreidx) 9422 v.AddArg(ptr) 9423 v.AddArg(idx) 9424 v.AddArg(val) 9425 v.AddArg(mem) 9426 return true 9427 } 9428 // match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 9429 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 9430 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 9431 for { 9432 off1 := v.AuxInt 9433 sym1 := v.Aux 9434 _ = v.Args[2] 9435 v_0 := v.Args[0] 9436 if v_0.Op != OpARM64MOVDaddr { 9437 break 9438 } 9439 off2 := v_0.AuxInt 9440 sym2 := v_0.Aux 9441 ptr := v_0.Args[0] 9442 val := v.Args[1] 9443 mem := v.Args[2] 9444 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 9445 break 9446 } 9447 v.reset(OpARM64MOVBstore) 9448 v.AuxInt = off1 + off2 9449 v.Aux = mergeSym(sym1, sym2) 9450 v.AddArg(ptr) 9451 v.AddArg(val) 9452 v.AddArg(mem) 9453 return true 9454 } 9455 // match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem) 9456 // cond: 9457 // result: (MOVBstorezero [off] {sym} ptr mem) 9458 for { 9459 off := v.AuxInt 9460 sym := v.Aux 9461 
_ = v.Args[2] 9462 ptr := v.Args[0] 9463 v_1 := v.Args[1] 9464 if v_1.Op != OpARM64MOVDconst { 9465 break 9466 } 9467 if v_1.AuxInt != 0 { 9468 break 9469 } 9470 mem := v.Args[2] 9471 v.reset(OpARM64MOVBstorezero) 9472 v.AuxInt = off 9473 v.Aux = sym 9474 v.AddArg(ptr) 9475 v.AddArg(mem) 9476 return true 9477 } 9478 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 9479 // cond: 9480 // result: (MOVBstore [off] {sym} ptr x mem) 9481 for { 9482 off := v.AuxInt 9483 sym := v.Aux 9484 _ = v.Args[2] 9485 ptr := v.Args[0] 9486 v_1 := v.Args[1] 9487 if v_1.Op != OpARM64MOVBreg { 9488 break 9489 } 9490 x := v_1.Args[0] 9491 mem := v.Args[2] 9492 v.reset(OpARM64MOVBstore) 9493 v.AuxInt = off 9494 v.Aux = sym 9495 v.AddArg(ptr) 9496 v.AddArg(x) 9497 v.AddArg(mem) 9498 return true 9499 } 9500 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 9501 // cond: 9502 // result: (MOVBstore [off] {sym} ptr x mem) 9503 for { 9504 off := v.AuxInt 9505 sym := v.Aux 9506 _ = v.Args[2] 9507 ptr := v.Args[0] 9508 v_1 := v.Args[1] 9509 if v_1.Op != OpARM64MOVBUreg { 9510 break 9511 } 9512 x := v_1.Args[0] 9513 mem := v.Args[2] 9514 v.reset(OpARM64MOVBstore) 9515 v.AuxInt = off 9516 v.Aux = sym 9517 v.AddArg(ptr) 9518 v.AddArg(x) 9519 v.AddArg(mem) 9520 return true 9521 } 9522 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 9523 // cond: 9524 // result: (MOVBstore [off] {sym} ptr x mem) 9525 for { 9526 off := v.AuxInt 9527 sym := v.Aux 9528 _ = v.Args[2] 9529 ptr := v.Args[0] 9530 v_1 := v.Args[1] 9531 if v_1.Op != OpARM64MOVHreg { 9532 break 9533 } 9534 x := v_1.Args[0] 9535 mem := v.Args[2] 9536 v.reset(OpARM64MOVBstore) 9537 v.AuxInt = off 9538 v.Aux = sym 9539 v.AddArg(ptr) 9540 v.AddArg(x) 9541 v.AddArg(mem) 9542 return true 9543 } 9544 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 9545 // cond: 9546 // result: (MOVBstore [off] {sym} ptr x mem) 9547 for { 9548 off := v.AuxInt 9549 sym := v.Aux 9550 _ = v.Args[2] 9551 ptr := v.Args[0] 9552 v_1 := v.Args[1] 9553 if 
v_1.Op != OpARM64MOVHUreg { 9554 break 9555 } 9556 x := v_1.Args[0] 9557 mem := v.Args[2] 9558 v.reset(OpARM64MOVBstore) 9559 v.AuxInt = off 9560 v.Aux = sym 9561 v.AddArg(ptr) 9562 v.AddArg(x) 9563 v.AddArg(mem) 9564 return true 9565 } 9566 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 9567 // cond: 9568 // result: (MOVBstore [off] {sym} ptr x mem) 9569 for { 9570 off := v.AuxInt 9571 sym := v.Aux 9572 _ = v.Args[2] 9573 ptr := v.Args[0] 9574 v_1 := v.Args[1] 9575 if v_1.Op != OpARM64MOVWreg { 9576 break 9577 } 9578 x := v_1.Args[0] 9579 mem := v.Args[2] 9580 v.reset(OpARM64MOVBstore) 9581 v.AuxInt = off 9582 v.Aux = sym 9583 v.AddArg(ptr) 9584 v.AddArg(x) 9585 v.AddArg(mem) 9586 return true 9587 } 9588 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 9589 // cond: 9590 // result: (MOVBstore [off] {sym} ptr x mem) 9591 for { 9592 off := v.AuxInt 9593 sym := v.Aux 9594 _ = v.Args[2] 9595 ptr := v.Args[0] 9596 v_1 := v.Args[1] 9597 if v_1.Op != OpARM64MOVWUreg { 9598 break 9599 } 9600 x := v_1.Args[0] 9601 mem := v.Args[2] 9602 v.reset(OpARM64MOVBstore) 9603 v.AuxInt = off 9604 v.Aux = sym 9605 v.AddArg(ptr) 9606 v.AddArg(x) 9607 v.AddArg(mem) 9608 return true 9609 } 9610 return false 9611 } 9612 func rewriteValueARM64_OpARM64MOVBstore_10(v *Value) bool { 9613 // match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9614 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9615 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9616 for { 9617 i := v.AuxInt 9618 s := v.Aux 9619 _ = v.Args[2] 9620 ptr0 := v.Args[0] 9621 v_1 := v.Args[1] 9622 if v_1.Op != OpARM64SRLconst { 9623 break 9624 } 9625 if v_1.AuxInt != 8 { 9626 break 9627 } 9628 w := v_1.Args[0] 9629 x := v.Args[2] 9630 if x.Op != OpARM64MOVBstore { 9631 break 9632 } 9633 if x.AuxInt != i-1 { 9634 break 9635 } 9636 if x.Aux != s { 9637 break 9638 } 9639 _ = x.Args[2] 9640 ptr1 := x.Args[0] 9641 if w != x.Args[1] { 9642 break 9643 } 9644 mem := x.Args[2] 9645 if 
!(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9646 break 9647 } 9648 v.reset(OpARM64MOVHstore) 9649 v.AuxInt = i - 1 9650 v.Aux = s 9651 v.AddArg(ptr0) 9652 v.AddArg(w) 9653 v.AddArg(mem) 9654 return true 9655 } 9656 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9657 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9658 // result: (MOVHstoreidx ptr1 idx1 w mem) 9659 for { 9660 if v.AuxInt != 1 { 9661 break 9662 } 9663 s := v.Aux 9664 _ = v.Args[2] 9665 v_0 := v.Args[0] 9666 if v_0.Op != OpARM64ADD { 9667 break 9668 } 9669 _ = v_0.Args[1] 9670 ptr0 := v_0.Args[0] 9671 idx0 := v_0.Args[1] 9672 v_1 := v.Args[1] 9673 if v_1.Op != OpARM64SRLconst { 9674 break 9675 } 9676 if v_1.AuxInt != 8 { 9677 break 9678 } 9679 w := v_1.Args[0] 9680 x := v.Args[2] 9681 if x.Op != OpARM64MOVBstoreidx { 9682 break 9683 } 9684 _ = x.Args[3] 9685 ptr1 := x.Args[0] 9686 idx1 := x.Args[1] 9687 if w != x.Args[2] { 9688 break 9689 } 9690 mem := x.Args[3] 9691 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9692 break 9693 } 9694 v.reset(OpARM64MOVHstoreidx) 9695 v.AddArg(ptr1) 9696 v.AddArg(idx1) 9697 v.AddArg(w) 9698 v.AddArg(mem) 9699 return true 9700 } 9701 // match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9702 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9703 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9704 for { 9705 i := v.AuxInt 9706 s := v.Aux 9707 _ = v.Args[2] 9708 ptr0 := v.Args[0] 9709 v_1 := v.Args[1] 9710 if v_1.Op != OpARM64UBFX { 9711 break 9712 } 9713 if v_1.AuxInt != arm64BFAuxInt(8, 8) { 9714 break 9715 } 9716 w := v_1.Args[0] 9717 x := v.Args[2] 9718 if x.Op != OpARM64MOVBstore { 9719 break 9720 } 9721 if x.AuxInt != i-1 { 9722 break 9723 } 9724 if 
x.Aux != s { 9725 break 9726 } 9727 _ = x.Args[2] 9728 ptr1 := x.Args[0] 9729 if w != x.Args[1] { 9730 break 9731 } 9732 mem := x.Args[2] 9733 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9734 break 9735 } 9736 v.reset(OpARM64MOVHstore) 9737 v.AuxInt = i - 1 9738 v.Aux = s 9739 v.AddArg(ptr0) 9740 v.AddArg(w) 9741 v.AddArg(mem) 9742 return true 9743 } 9744 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9745 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9746 // result: (MOVHstoreidx ptr1 idx1 w mem) 9747 for { 9748 if v.AuxInt != 1 { 9749 break 9750 } 9751 s := v.Aux 9752 _ = v.Args[2] 9753 v_0 := v.Args[0] 9754 if v_0.Op != OpARM64ADD { 9755 break 9756 } 9757 _ = v_0.Args[1] 9758 ptr0 := v_0.Args[0] 9759 idx0 := v_0.Args[1] 9760 v_1 := v.Args[1] 9761 if v_1.Op != OpARM64UBFX { 9762 break 9763 } 9764 if v_1.AuxInt != arm64BFAuxInt(8, 8) { 9765 break 9766 } 9767 w := v_1.Args[0] 9768 x := v.Args[2] 9769 if x.Op != OpARM64MOVBstoreidx { 9770 break 9771 } 9772 _ = x.Args[3] 9773 ptr1 := x.Args[0] 9774 idx1 := x.Args[1] 9775 if w != x.Args[2] { 9776 break 9777 } 9778 mem := x.Args[3] 9779 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9780 break 9781 } 9782 v.reset(OpARM64MOVHstoreidx) 9783 v.AddArg(ptr1) 9784 v.AddArg(idx1) 9785 v.AddArg(w) 9786 v.AddArg(mem) 9787 return true 9788 } 9789 // match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9790 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9791 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9792 for { 9793 i := v.AuxInt 9794 s := v.Aux 9795 _ = v.Args[2] 9796 ptr0 := v.Args[0] 9797 v_1 := v.Args[1] 9798 if v_1.Op != OpARM64UBFX { 9799 break 9800 } 9801 if v_1.AuxInt != 
arm64BFAuxInt(8, 24) { 9802 break 9803 } 9804 w := v_1.Args[0] 9805 x := v.Args[2] 9806 if x.Op != OpARM64MOVBstore { 9807 break 9808 } 9809 if x.AuxInt != i-1 { 9810 break 9811 } 9812 if x.Aux != s { 9813 break 9814 } 9815 _ = x.Args[2] 9816 ptr1 := x.Args[0] 9817 if w != x.Args[1] { 9818 break 9819 } 9820 mem := x.Args[2] 9821 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9822 break 9823 } 9824 v.reset(OpARM64MOVHstore) 9825 v.AuxInt = i - 1 9826 v.Aux = s 9827 v.AddArg(ptr0) 9828 v.AddArg(w) 9829 v.AddArg(mem) 9830 return true 9831 } 9832 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem)) 9833 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9834 // result: (MOVHstoreidx ptr1 idx1 w mem) 9835 for { 9836 if v.AuxInt != 1 { 9837 break 9838 } 9839 s := v.Aux 9840 _ = v.Args[2] 9841 v_0 := v.Args[0] 9842 if v_0.Op != OpARM64ADD { 9843 break 9844 } 9845 _ = v_0.Args[1] 9846 ptr0 := v_0.Args[0] 9847 idx0 := v_0.Args[1] 9848 v_1 := v.Args[1] 9849 if v_1.Op != OpARM64UBFX { 9850 break 9851 } 9852 if v_1.AuxInt != arm64BFAuxInt(8, 24) { 9853 break 9854 } 9855 w := v_1.Args[0] 9856 x := v.Args[2] 9857 if x.Op != OpARM64MOVBstoreidx { 9858 break 9859 } 9860 _ = x.Args[3] 9861 ptr1 := x.Args[0] 9862 idx1 := x.Args[1] 9863 if w != x.Args[2] { 9864 break 9865 } 9866 mem := x.Args[3] 9867 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9868 break 9869 } 9870 v.reset(OpARM64MOVHstoreidx) 9871 v.AddArg(ptr1) 9872 v.AddArg(idx1) 9873 v.AddArg(w) 9874 v.AddArg(mem) 9875 return true 9876 } 9877 // match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem)) 9878 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9879 // result: (MOVHstore [i-1] {s} ptr0 w mem) 9880 for { 
9881 i := v.AuxInt 9882 s := v.Aux 9883 _ = v.Args[2] 9884 ptr0 := v.Args[0] 9885 v_1 := v.Args[1] 9886 if v_1.Op != OpARM64SRLconst { 9887 break 9888 } 9889 if v_1.AuxInt != 8 { 9890 break 9891 } 9892 v_1_0 := v_1.Args[0] 9893 if v_1_0.Op != OpARM64MOVDreg { 9894 break 9895 } 9896 w := v_1_0.Args[0] 9897 x := v.Args[2] 9898 if x.Op != OpARM64MOVBstore { 9899 break 9900 } 9901 if x.AuxInt != i-1 { 9902 break 9903 } 9904 if x.Aux != s { 9905 break 9906 } 9907 _ = x.Args[2] 9908 ptr1 := x.Args[0] 9909 if w != x.Args[1] { 9910 break 9911 } 9912 mem := x.Args[2] 9913 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 9914 break 9915 } 9916 v.reset(OpARM64MOVHstore) 9917 v.AuxInt = i - 1 9918 v.Aux = s 9919 v.AddArg(ptr0) 9920 v.AddArg(w) 9921 v.AddArg(mem) 9922 return true 9923 } 9924 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem)) 9925 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 9926 // result: (MOVHstoreidx ptr1 idx1 w mem) 9927 for { 9928 if v.AuxInt != 1 { 9929 break 9930 } 9931 s := v.Aux 9932 _ = v.Args[2] 9933 v_0 := v.Args[0] 9934 if v_0.Op != OpARM64ADD { 9935 break 9936 } 9937 _ = v_0.Args[1] 9938 ptr0 := v_0.Args[0] 9939 idx0 := v_0.Args[1] 9940 v_1 := v.Args[1] 9941 if v_1.Op != OpARM64SRLconst { 9942 break 9943 } 9944 if v_1.AuxInt != 8 { 9945 break 9946 } 9947 v_1_0 := v_1.Args[0] 9948 if v_1_0.Op != OpARM64MOVDreg { 9949 break 9950 } 9951 w := v_1_0.Args[0] 9952 x := v.Args[2] 9953 if x.Op != OpARM64MOVBstoreidx { 9954 break 9955 } 9956 _ = x.Args[3] 9957 ptr1 := x.Args[0] 9958 idx1 := x.Args[1] 9959 if w != x.Args[2] { 9960 break 9961 } 9962 mem := x.Args[3] 9963 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 9964 break 9965 } 9966 v.reset(OpARM64MOVHstoreidx) 9967 v.AddArg(ptr1) 9968 
v.AddArg(idx1) 9969 v.AddArg(w) 9970 v.AddArg(mem) 9971 return true 9972 } 9973 // match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem)) 9974 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 9975 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 9976 for { 9977 i := v.AuxInt 9978 s := v.Aux 9979 _ = v.Args[2] 9980 ptr0 := v.Args[0] 9981 v_1 := v.Args[1] 9982 if v_1.Op != OpARM64SRLconst { 9983 break 9984 } 9985 j := v_1.AuxInt 9986 w := v_1.Args[0] 9987 x := v.Args[2] 9988 if x.Op != OpARM64MOVBstore { 9989 break 9990 } 9991 if x.AuxInt != i-1 { 9992 break 9993 } 9994 if x.Aux != s { 9995 break 9996 } 9997 _ = x.Args[2] 9998 ptr1 := x.Args[0] 9999 w0 := x.Args[1] 10000 if w0.Op != OpARM64SRLconst { 10001 break 10002 } 10003 if w0.AuxInt != j-8 { 10004 break 10005 } 10006 if w != w0.Args[0] { 10007 break 10008 } 10009 mem := x.Args[2] 10010 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 10011 break 10012 } 10013 v.reset(OpARM64MOVHstore) 10014 v.AuxInt = i - 1 10015 v.Aux = s 10016 v.AddArg(ptr0) 10017 v.AddArg(w0) 10018 v.AddArg(mem) 10019 return true 10020 } 10021 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem)) 10022 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10023 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10024 for { 10025 if v.AuxInt != 1 { 10026 break 10027 } 10028 s := v.Aux 10029 _ = v.Args[2] 10030 v_0 := v.Args[0] 10031 if v_0.Op != OpARM64ADD { 10032 break 10033 } 10034 _ = v_0.Args[1] 10035 ptr0 := v_0.Args[0] 10036 idx0 := v_0.Args[1] 10037 v_1 := v.Args[1] 10038 if v_1.Op != OpARM64SRLconst { 10039 break 10040 } 10041 j := v_1.AuxInt 10042 w := v_1.Args[0] 10043 x := v.Args[2] 10044 if x.Op != OpARM64MOVBstoreidx { 10045 break 10046 } 10047 _ = x.Args[3] 10048 ptr1 := x.Args[0] 10049 idx1 := x.Args[1] 10050 w0 := 
x.Args[2] 10051 if w0.Op != OpARM64SRLconst { 10052 break 10053 } 10054 if w0.AuxInt != j-8 { 10055 break 10056 } 10057 if w != w0.Args[0] { 10058 break 10059 } 10060 mem := x.Args[3] 10061 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10062 break 10063 } 10064 v.reset(OpARM64MOVHstoreidx) 10065 v.AddArg(ptr1) 10066 v.AddArg(idx1) 10067 v.AddArg(w0) 10068 v.AddArg(mem) 10069 return true 10070 } 10071 return false 10072 } 10073 func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool { 10074 b := v.Block 10075 _ = b 10076 // match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem)) 10077 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x) 10078 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 10079 for { 10080 i := v.AuxInt 10081 s := v.Aux 10082 _ = v.Args[2] 10083 ptr0 := v.Args[0] 10084 v_1 := v.Args[1] 10085 if v_1.Op != OpARM64UBFX { 10086 break 10087 } 10088 bfc := v_1.AuxInt 10089 w := v_1.Args[0] 10090 x := v.Args[2] 10091 if x.Op != OpARM64MOVBstore { 10092 break 10093 } 10094 if x.AuxInt != i-1 { 10095 break 10096 } 10097 if x.Aux != s { 10098 break 10099 } 10100 _ = x.Args[2] 10101 ptr1 := x.Args[0] 10102 w0 := x.Args[1] 10103 if w0.Op != OpARM64UBFX { 10104 break 10105 } 10106 bfc2 := w0.AuxInt 10107 if w != w0.Args[0] { 10108 break 10109 } 10110 mem := x.Args[2] 10111 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) { 10112 break 10113 } 10114 v.reset(OpARM64MOVHstore) 10115 v.AuxInt = i - 1 10116 v.Aux = s 10117 v.AddArg(ptr0) 10118 v.AddArg(w0) 10119 v.AddArg(mem) 10120 return true 10121 } 10122 // 
match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem)) 10123 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x) 10124 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10125 for { 10126 if v.AuxInt != 1 { 10127 break 10128 } 10129 s := v.Aux 10130 _ = v.Args[2] 10131 v_0 := v.Args[0] 10132 if v_0.Op != OpARM64ADD { 10133 break 10134 } 10135 _ = v_0.Args[1] 10136 ptr0 := v_0.Args[0] 10137 idx0 := v_0.Args[1] 10138 v_1 := v.Args[1] 10139 if v_1.Op != OpARM64UBFX { 10140 break 10141 } 10142 bfc := v_1.AuxInt 10143 w := v_1.Args[0] 10144 x := v.Args[2] 10145 if x.Op != OpARM64MOVBstoreidx { 10146 break 10147 } 10148 _ = x.Args[3] 10149 ptr1 := x.Args[0] 10150 idx1 := x.Args[1] 10151 w0 := x.Args[2] 10152 if w0.Op != OpARM64UBFX { 10153 break 10154 } 10155 bfc2 := w0.AuxInt 10156 if w != w0.Args[0] { 10157 break 10158 } 10159 mem := x.Args[3] 10160 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) { 10161 break 10162 } 10163 v.reset(OpARM64MOVHstoreidx) 10164 v.AddArg(ptr1) 10165 v.AddArg(idx1) 10166 v.AddArg(w0) 10167 v.AddArg(mem) 10168 return true 10169 } 10170 // match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem)) 10171 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 10172 // result: (MOVHstore [i-1] {s} ptr0 w0 mem) 10173 for { 10174 i := v.AuxInt 10175 s := v.Aux 10176 _ = v.Args[2] 10177 ptr0 := v.Args[0] 10178 v_1 := v.Args[1] 10179 if v_1.Op != 
OpARM64SRLconst { 10180 break 10181 } 10182 j := v_1.AuxInt 10183 v_1_0 := v_1.Args[0] 10184 if v_1_0.Op != OpARM64MOVDreg { 10185 break 10186 } 10187 w := v_1_0.Args[0] 10188 x := v.Args[2] 10189 if x.Op != OpARM64MOVBstore { 10190 break 10191 } 10192 if x.AuxInt != i-1 { 10193 break 10194 } 10195 if x.Aux != s { 10196 break 10197 } 10198 _ = x.Args[2] 10199 ptr1 := x.Args[0] 10200 w0 := x.Args[1] 10201 if w0.Op != OpARM64SRLconst { 10202 break 10203 } 10204 if w0.AuxInt != j-8 { 10205 break 10206 } 10207 w0_0 := w0.Args[0] 10208 if w0_0.Op != OpARM64MOVDreg { 10209 break 10210 } 10211 if w != w0_0.Args[0] { 10212 break 10213 } 10214 mem := x.Args[2] 10215 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 10216 break 10217 } 10218 v.reset(OpARM64MOVHstore) 10219 v.AuxInt = i - 1 10220 v.Aux = s 10221 v.AddArg(ptr0) 10222 v.AddArg(w0) 10223 v.AddArg(mem) 10224 return true 10225 } 10226 // match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem)) 10227 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 10228 // result: (MOVHstoreidx ptr1 idx1 w0 mem) 10229 for { 10230 if v.AuxInt != 1 { 10231 break 10232 } 10233 s := v.Aux 10234 _ = v.Args[2] 10235 v_0 := v.Args[0] 10236 if v_0.Op != OpARM64ADD { 10237 break 10238 } 10239 _ = v_0.Args[1] 10240 ptr0 := v_0.Args[0] 10241 idx0 := v_0.Args[1] 10242 v_1 := v.Args[1] 10243 if v_1.Op != OpARM64SRLconst { 10244 break 10245 } 10246 j := v_1.AuxInt 10247 v_1_0 := v_1.Args[0] 10248 if v_1_0.Op != OpARM64MOVDreg { 10249 break 10250 } 10251 w := v_1_0.Args[0] 10252 x := v.Args[2] 10253 if x.Op != OpARM64MOVBstoreidx { 10254 break 10255 } 10256 _ = x.Args[3] 10257 ptr1 := x.Args[0] 10258 idx1 := x.Args[1] 10259 w0 := x.Args[2] 10260 if w0.Op != OpARM64SRLconst { 10261 break 10262 } 10263 if w0.AuxInt != j-8 { 10264 break 10265 } 10266 w0_0 := 
w0.Args[0] 10267 if w0_0.Op != OpARM64MOVDreg { 10268 break 10269 } 10270 if w != w0_0.Args[0] { 10271 break 10272 } 10273 mem := x.Args[3] 10274 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 10275 break 10276 } 10277 v.reset(OpARM64MOVHstoreidx) 10278 v.AddArg(ptr1) 10279 v.AddArg(idx1) 10280 v.AddArg(w0) 10281 v.AddArg(mem) 10282 return true 10283 } 10284 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem)))))))) 10285 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 10286 // result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem) 10287 for { 10288 i := v.AuxInt 10289 s := v.Aux 10290 _ = v.Args[2] 10291 ptr := v.Args[0] 10292 w := v.Args[1] 10293 x0 := v.Args[2] 10294 if x0.Op != OpARM64MOVBstore { 10295 break 10296 } 10297 if x0.AuxInt != i-1 { 10298 break 10299 } 10300 if x0.Aux != s { 10301 break 10302 } 10303 _ = x0.Args[2] 10304 if ptr != x0.Args[0] { 10305 break 10306 } 10307 x0_1 := x0.Args[1] 10308 if x0_1.Op != OpARM64SRLconst { 10309 break 10310 } 10311 if x0_1.AuxInt != 8 { 10312 break 10313 } 10314 if w != x0_1.Args[0] { 10315 break 10316 } 10317 x1 := x0.Args[2] 10318 if x1.Op != OpARM64MOVBstore { 10319 break 10320 } 10321 if x1.AuxInt != i-2 { 10322 break 10323 } 10324 if x1.Aux != s { 10325 break 10326 } 10327 _ = x1.Args[2] 10328 if ptr != x1.Args[0] { 10329 break 10330 } 10331 x1_1 := x1.Args[1] 10332 if x1_1.Op != OpARM64SRLconst { 10333 break 10334 } 10335 if 
x1_1.AuxInt != 16 { 10336 break 10337 } 10338 if w != x1_1.Args[0] { 10339 break 10340 } 10341 x2 := x1.Args[2] 10342 if x2.Op != OpARM64MOVBstore { 10343 break 10344 } 10345 if x2.AuxInt != i-3 { 10346 break 10347 } 10348 if x2.Aux != s { 10349 break 10350 } 10351 _ = x2.Args[2] 10352 if ptr != x2.Args[0] { 10353 break 10354 } 10355 x2_1 := x2.Args[1] 10356 if x2_1.Op != OpARM64SRLconst { 10357 break 10358 } 10359 if x2_1.AuxInt != 24 { 10360 break 10361 } 10362 if w != x2_1.Args[0] { 10363 break 10364 } 10365 x3 := x2.Args[2] 10366 if x3.Op != OpARM64MOVBstore { 10367 break 10368 } 10369 if x3.AuxInt != i-4 { 10370 break 10371 } 10372 if x3.Aux != s { 10373 break 10374 } 10375 _ = x3.Args[2] 10376 if ptr != x3.Args[0] { 10377 break 10378 } 10379 x3_1 := x3.Args[1] 10380 if x3_1.Op != OpARM64SRLconst { 10381 break 10382 } 10383 if x3_1.AuxInt != 32 { 10384 break 10385 } 10386 if w != x3_1.Args[0] { 10387 break 10388 } 10389 x4 := x3.Args[2] 10390 if x4.Op != OpARM64MOVBstore { 10391 break 10392 } 10393 if x4.AuxInt != i-5 { 10394 break 10395 } 10396 if x4.Aux != s { 10397 break 10398 } 10399 _ = x4.Args[2] 10400 if ptr != x4.Args[0] { 10401 break 10402 } 10403 x4_1 := x4.Args[1] 10404 if x4_1.Op != OpARM64SRLconst { 10405 break 10406 } 10407 if x4_1.AuxInt != 40 { 10408 break 10409 } 10410 if w != x4_1.Args[0] { 10411 break 10412 } 10413 x5 := x4.Args[2] 10414 if x5.Op != OpARM64MOVBstore { 10415 break 10416 } 10417 if x5.AuxInt != i-6 { 10418 break 10419 } 10420 if x5.Aux != s { 10421 break 10422 } 10423 _ = x5.Args[2] 10424 if ptr != x5.Args[0] { 10425 break 10426 } 10427 x5_1 := x5.Args[1] 10428 if x5_1.Op != OpARM64SRLconst { 10429 break 10430 } 10431 if x5_1.AuxInt != 48 { 10432 break 10433 } 10434 if w != x5_1.Args[0] { 10435 break 10436 } 10437 x6 := x5.Args[2] 10438 if x6.Op != OpARM64MOVBstore { 10439 break 10440 } 10441 if x6.AuxInt != i-7 { 10442 break 10443 } 10444 if x6.Aux != s { 10445 break 10446 } 10447 _ = x6.Args[2] 10448 if ptr != x6.Args[0] { 
10449 break 10450 } 10451 x6_1 := x6.Args[1] 10452 if x6_1.Op != OpARM64SRLconst { 10453 break 10454 } 10455 if x6_1.AuxInt != 56 { 10456 break 10457 } 10458 if w != x6_1.Args[0] { 10459 break 10460 } 10461 mem := x6.Args[2] 10462 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 10463 break 10464 } 10465 v.reset(OpARM64MOVDstore) 10466 v.AuxInt = i - 7 10467 v.Aux = s 10468 v.AddArg(ptr) 10469 v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type) 10470 v0.AddArg(w) 10471 v.AddArg(v0) 10472 v.AddArg(mem) 10473 return true 10474 } 10475 // match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem)))))))) 10476 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) 10477 // result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem) 10478 for { 10479 if v.AuxInt != 7 { 10480 break 10481 } 10482 s := v.Aux 10483 _ = v.Args[2] 10484 p := v.Args[0] 10485 w := v.Args[1] 10486 x0 := v.Args[2] 10487 if x0.Op != OpARM64MOVBstore { 10488 break 10489 } 10490 if x0.AuxInt != 6 { 10491 break 10492 } 10493 if x0.Aux != s { 10494 break 10495 } 10496 _ = x0.Args[2] 10497 if p != x0.Args[0] { 10498 break 10499 } 10500 x0_1 := x0.Args[1] 10501 if x0_1.Op != OpARM64SRLconst { 10502 break 10503 } 10504 if x0_1.AuxInt != 8 { 10505 break 10506 } 10507 
if w != x0_1.Args[0] { 10508 break 10509 } 10510 x1 := x0.Args[2] 10511 if x1.Op != OpARM64MOVBstore { 10512 break 10513 } 10514 if x1.AuxInt != 5 { 10515 break 10516 } 10517 if x1.Aux != s { 10518 break 10519 } 10520 _ = x1.Args[2] 10521 if p != x1.Args[0] { 10522 break 10523 } 10524 x1_1 := x1.Args[1] 10525 if x1_1.Op != OpARM64SRLconst { 10526 break 10527 } 10528 if x1_1.AuxInt != 16 { 10529 break 10530 } 10531 if w != x1_1.Args[0] { 10532 break 10533 } 10534 x2 := x1.Args[2] 10535 if x2.Op != OpARM64MOVBstore { 10536 break 10537 } 10538 if x2.AuxInt != 4 { 10539 break 10540 } 10541 if x2.Aux != s { 10542 break 10543 } 10544 _ = x2.Args[2] 10545 if p != x2.Args[0] { 10546 break 10547 } 10548 x2_1 := x2.Args[1] 10549 if x2_1.Op != OpARM64SRLconst { 10550 break 10551 } 10552 if x2_1.AuxInt != 24 { 10553 break 10554 } 10555 if w != x2_1.Args[0] { 10556 break 10557 } 10558 x3 := x2.Args[2] 10559 if x3.Op != OpARM64MOVBstore { 10560 break 10561 } 10562 if x3.AuxInt != 3 { 10563 break 10564 } 10565 if x3.Aux != s { 10566 break 10567 } 10568 _ = x3.Args[2] 10569 if p != x3.Args[0] { 10570 break 10571 } 10572 x3_1 := x3.Args[1] 10573 if x3_1.Op != OpARM64SRLconst { 10574 break 10575 } 10576 if x3_1.AuxInt != 32 { 10577 break 10578 } 10579 if w != x3_1.Args[0] { 10580 break 10581 } 10582 x4 := x3.Args[2] 10583 if x4.Op != OpARM64MOVBstore { 10584 break 10585 } 10586 if x4.AuxInt != 2 { 10587 break 10588 } 10589 if x4.Aux != s { 10590 break 10591 } 10592 _ = x4.Args[2] 10593 if p != x4.Args[0] { 10594 break 10595 } 10596 x4_1 := x4.Args[1] 10597 if x4_1.Op != OpARM64SRLconst { 10598 break 10599 } 10600 if x4_1.AuxInt != 40 { 10601 break 10602 } 10603 if w != x4_1.Args[0] { 10604 break 10605 } 10606 x5 := x4.Args[2] 10607 if x5.Op != OpARM64MOVBstore { 10608 break 10609 } 10610 if x5.AuxInt != 1 { 10611 break 10612 } 10613 if x5.Aux != s { 10614 break 10615 } 10616 _ = x5.Args[2] 10617 p1 := x5.Args[0] 10618 if p1.Op != OpARM64ADD { 10619 break 10620 } 10621 _ = p1.Args[1] 
10622 ptr1 := p1.Args[0] 10623 idx1 := p1.Args[1] 10624 x5_1 := x5.Args[1] 10625 if x5_1.Op != OpARM64SRLconst { 10626 break 10627 } 10628 if x5_1.AuxInt != 48 { 10629 break 10630 } 10631 if w != x5_1.Args[0] { 10632 break 10633 } 10634 x6 := x5.Args[2] 10635 if x6.Op != OpARM64MOVBstoreidx { 10636 break 10637 } 10638 _ = x6.Args[3] 10639 ptr0 := x6.Args[0] 10640 idx0 := x6.Args[1] 10641 x6_2 := x6.Args[2] 10642 if x6_2.Op != OpARM64SRLconst { 10643 break 10644 } 10645 if x6_2.AuxInt != 56 { 10646 break 10647 } 10648 if w != x6_2.Args[0] { 10649 break 10650 } 10651 mem := x6.Args[3] 10652 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) { 10653 break 10654 } 10655 v.reset(OpARM64MOVDstoreidx) 10656 v.AddArg(ptr0) 10657 v.AddArg(idx0) 10658 v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type) 10659 v0.AddArg(w) 10660 v.AddArg(v0) 10661 v.AddArg(mem) 10662 return true 10663 } 10664 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 10665 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10666 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 10667 for { 10668 i := v.AuxInt 10669 s := v.Aux 10670 _ = v.Args[2] 10671 ptr := v.Args[0] 10672 w := v.Args[1] 10673 x0 := v.Args[2] 10674 if x0.Op != OpARM64MOVBstore { 10675 break 10676 } 10677 if x0.AuxInt != i-1 { 10678 break 10679 } 10680 if x0.Aux != s { 10681 break 10682 } 10683 _ = x0.Args[2] 10684 if ptr != x0.Args[0] { 10685 break 10686 } 10687 x0_1 := x0.Args[1] 10688 if x0_1.Op != OpARM64UBFX { 
10689 break 10690 } 10691 if x0_1.AuxInt != arm64BFAuxInt(8, 24) { 10692 break 10693 } 10694 if w != x0_1.Args[0] { 10695 break 10696 } 10697 x1 := x0.Args[2] 10698 if x1.Op != OpARM64MOVBstore { 10699 break 10700 } 10701 if x1.AuxInt != i-2 { 10702 break 10703 } 10704 if x1.Aux != s { 10705 break 10706 } 10707 _ = x1.Args[2] 10708 if ptr != x1.Args[0] { 10709 break 10710 } 10711 x1_1 := x1.Args[1] 10712 if x1_1.Op != OpARM64UBFX { 10713 break 10714 } 10715 if x1_1.AuxInt != arm64BFAuxInt(16, 16) { 10716 break 10717 } 10718 if w != x1_1.Args[0] { 10719 break 10720 } 10721 x2 := x1.Args[2] 10722 if x2.Op != OpARM64MOVBstore { 10723 break 10724 } 10725 if x2.AuxInt != i-3 { 10726 break 10727 } 10728 if x2.Aux != s { 10729 break 10730 } 10731 _ = x2.Args[2] 10732 if ptr != x2.Args[0] { 10733 break 10734 } 10735 x2_1 := x2.Args[1] 10736 if x2_1.Op != OpARM64UBFX { 10737 break 10738 } 10739 if x2_1.AuxInt != arm64BFAuxInt(24, 8) { 10740 break 10741 } 10742 if w != x2_1.Args[0] { 10743 break 10744 } 10745 mem := x2.Args[2] 10746 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 10747 break 10748 } 10749 v.reset(OpARM64MOVWstore) 10750 v.AuxInt = i - 3 10751 v.Aux = s 10752 v.AddArg(ptr) 10753 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 10754 v0.AddArg(w) 10755 v.AddArg(v0) 10756 v.AddArg(mem) 10757 return true 10758 } 10759 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 10760 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 10761 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 10762 for { 10763 if v.AuxInt != 3 { 10764 break 10765 } 10766 s := v.Aux 
10767 _ = v.Args[2] 10768 p := v.Args[0] 10769 w := v.Args[1] 10770 x0 := v.Args[2] 10771 if x0.Op != OpARM64MOVBstore { 10772 break 10773 } 10774 if x0.AuxInt != 2 { 10775 break 10776 } 10777 if x0.Aux != s { 10778 break 10779 } 10780 _ = x0.Args[2] 10781 if p != x0.Args[0] { 10782 break 10783 } 10784 x0_1 := x0.Args[1] 10785 if x0_1.Op != OpARM64UBFX { 10786 break 10787 } 10788 if x0_1.AuxInt != arm64BFAuxInt(8, 24) { 10789 break 10790 } 10791 if w != x0_1.Args[0] { 10792 break 10793 } 10794 x1 := x0.Args[2] 10795 if x1.Op != OpARM64MOVBstore { 10796 break 10797 } 10798 if x1.AuxInt != 1 { 10799 break 10800 } 10801 if x1.Aux != s { 10802 break 10803 } 10804 _ = x1.Args[2] 10805 p1 := x1.Args[0] 10806 if p1.Op != OpARM64ADD { 10807 break 10808 } 10809 _ = p1.Args[1] 10810 ptr1 := p1.Args[0] 10811 idx1 := p1.Args[1] 10812 x1_1 := x1.Args[1] 10813 if x1_1.Op != OpARM64UBFX { 10814 break 10815 } 10816 if x1_1.AuxInt != arm64BFAuxInt(16, 16) { 10817 break 10818 } 10819 if w != x1_1.Args[0] { 10820 break 10821 } 10822 x2 := x1.Args[2] 10823 if x2.Op != OpARM64MOVBstoreidx { 10824 break 10825 } 10826 _ = x2.Args[3] 10827 ptr0 := x2.Args[0] 10828 idx0 := x2.Args[1] 10829 x2_2 := x2.Args[2] 10830 if x2_2.Op != OpARM64UBFX { 10831 break 10832 } 10833 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 10834 break 10835 } 10836 if w != x2_2.Args[0] { 10837 break 10838 } 10839 mem := x2.Args[3] 10840 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 10841 break 10842 } 10843 v.reset(OpARM64MOVWstoreidx) 10844 v.AddArg(ptr0) 10845 v.AddArg(idx0) 10846 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 10847 v0.AddArg(w) 10848 v.AddArg(v0) 10849 v.AddArg(mem) 10850 return true 10851 } 10852 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr 
(SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem)))) 10853 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 10854 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 10855 for { 10856 i := v.AuxInt 10857 s := v.Aux 10858 _ = v.Args[2] 10859 ptr := v.Args[0] 10860 w := v.Args[1] 10861 x0 := v.Args[2] 10862 if x0.Op != OpARM64MOVBstore { 10863 break 10864 } 10865 if x0.AuxInt != i-1 { 10866 break 10867 } 10868 if x0.Aux != s { 10869 break 10870 } 10871 _ = x0.Args[2] 10872 if ptr != x0.Args[0] { 10873 break 10874 } 10875 x0_1 := x0.Args[1] 10876 if x0_1.Op != OpARM64SRLconst { 10877 break 10878 } 10879 if x0_1.AuxInt != 8 { 10880 break 10881 } 10882 x0_1_0 := x0_1.Args[0] 10883 if x0_1_0.Op != OpARM64MOVDreg { 10884 break 10885 } 10886 if w != x0_1_0.Args[0] { 10887 break 10888 } 10889 x1 := x0.Args[2] 10890 if x1.Op != OpARM64MOVBstore { 10891 break 10892 } 10893 if x1.AuxInt != i-2 { 10894 break 10895 } 10896 if x1.Aux != s { 10897 break 10898 } 10899 _ = x1.Args[2] 10900 if ptr != x1.Args[0] { 10901 break 10902 } 10903 x1_1 := x1.Args[1] 10904 if x1_1.Op != OpARM64SRLconst { 10905 break 10906 } 10907 if x1_1.AuxInt != 16 { 10908 break 10909 } 10910 x1_1_0 := x1_1.Args[0] 10911 if x1_1_0.Op != OpARM64MOVDreg { 10912 break 10913 } 10914 if w != x1_1_0.Args[0] { 10915 break 10916 } 10917 x2 := x1.Args[2] 10918 if x2.Op != OpARM64MOVBstore { 10919 break 10920 } 10921 if x2.AuxInt != i-3 { 10922 break 10923 } 10924 if x2.Aux != s { 10925 break 10926 } 10927 _ = x2.Args[2] 10928 if ptr != x2.Args[0] { 10929 break 10930 } 10931 x2_1 := x2.Args[1] 10932 if x2_1.Op != OpARM64SRLconst { 10933 break 10934 } 10935 if x2_1.AuxInt != 24 { 10936 break 10937 } 10938 x2_1_0 := x2_1.Args[0] 10939 if x2_1_0.Op != OpARM64MOVDreg { 10940 break 10941 } 10942 if w != x2_1_0.Args[0] { 10943 break 10944 } 10945 mem := x2.Args[2] 10946 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && 
clobber(x0) && clobber(x1) && clobber(x2)) { 10947 break 10948 } 10949 v.reset(OpARM64MOVWstore) 10950 v.AuxInt = i - 3 10951 v.Aux = s 10952 v.AddArg(ptr) 10953 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 10954 v0.AddArg(w) 10955 v.AddArg(v0) 10956 v.AddArg(mem) 10957 return true 10958 } 10959 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem)))) 10960 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 10961 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 10962 for { 10963 if v.AuxInt != 3 { 10964 break 10965 } 10966 s := v.Aux 10967 _ = v.Args[2] 10968 p := v.Args[0] 10969 w := v.Args[1] 10970 x0 := v.Args[2] 10971 if x0.Op != OpARM64MOVBstore { 10972 break 10973 } 10974 if x0.AuxInt != 2 { 10975 break 10976 } 10977 if x0.Aux != s { 10978 break 10979 } 10980 _ = x0.Args[2] 10981 if p != x0.Args[0] { 10982 break 10983 } 10984 x0_1 := x0.Args[1] 10985 if x0_1.Op != OpARM64SRLconst { 10986 break 10987 } 10988 if x0_1.AuxInt != 8 { 10989 break 10990 } 10991 x0_1_0 := x0_1.Args[0] 10992 if x0_1_0.Op != OpARM64MOVDreg { 10993 break 10994 } 10995 if w != x0_1_0.Args[0] { 10996 break 10997 } 10998 x1 := x0.Args[2] 10999 if x1.Op != OpARM64MOVBstore { 11000 break 11001 } 11002 if x1.AuxInt != 1 { 11003 break 11004 } 11005 if x1.Aux != s { 11006 break 11007 } 11008 _ = x1.Args[2] 11009 p1 := x1.Args[0] 11010 if p1.Op != OpARM64ADD { 11011 break 11012 } 11013 _ = p1.Args[1] 11014 ptr1 := p1.Args[0] 11015 idx1 := p1.Args[1] 11016 x1_1 := x1.Args[1] 11017 if x1_1.Op != OpARM64SRLconst { 11018 break 11019 } 11020 if x1_1.AuxInt != 16 { 11021 break 11022 } 11023 x1_1_0 := x1_1.Args[0] 11024 if x1_1_0.Op != OpARM64MOVDreg { 
11025 break 11026 } 11027 if w != x1_1_0.Args[0] { 11028 break 11029 } 11030 x2 := x1.Args[2] 11031 if x2.Op != OpARM64MOVBstoreidx { 11032 break 11033 } 11034 _ = x2.Args[3] 11035 ptr0 := x2.Args[0] 11036 idx0 := x2.Args[1] 11037 x2_2 := x2.Args[2] 11038 if x2_2.Op != OpARM64SRLconst { 11039 break 11040 } 11041 if x2_2.AuxInt != 24 { 11042 break 11043 } 11044 x2_2_0 := x2_2.Args[0] 11045 if x2_2_0.Op != OpARM64MOVDreg { 11046 break 11047 } 11048 if w != x2_2_0.Args[0] { 11049 break 11050 } 11051 mem := x2.Args[3] 11052 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 11053 break 11054 } 11055 v.reset(OpARM64MOVWstoreidx) 11056 v.AddArg(ptr0) 11057 v.AddArg(idx0) 11058 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 11059 v0.AddArg(w) 11060 v.AddArg(v0) 11061 v.AddArg(mem) 11062 return true 11063 } 11064 return false 11065 } 11066 func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool { 11067 b := v.Block 11068 _ = b 11069 // match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem)))) 11070 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 11071 // result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem) 11072 for { 11073 i := v.AuxInt 11074 s := v.Aux 11075 _ = v.Args[2] 11076 ptr := v.Args[0] 11077 w := v.Args[1] 11078 x0 := v.Args[2] 11079 if x0.Op != OpARM64MOVBstore { 11080 break 11081 } 11082 if x0.AuxInt != i-1 { 11083 break 11084 } 11085 if x0.Aux != s { 11086 break 11087 } 11088 _ = x0.Args[2] 11089 if ptr != x0.Args[0] { 11090 break 11091 } 11092 x0_1 := x0.Args[1] 11093 if x0_1.Op != OpARM64SRLconst { 11094 break 11095 } 11096 if x0_1.AuxInt != 8 { 11097 break 11098 } 11099 if w != x0_1.Args[0] { 11100 break 
11101 } 11102 x1 := x0.Args[2] 11103 if x1.Op != OpARM64MOVBstore { 11104 break 11105 } 11106 if x1.AuxInt != i-2 { 11107 break 11108 } 11109 if x1.Aux != s { 11110 break 11111 } 11112 _ = x1.Args[2] 11113 if ptr != x1.Args[0] { 11114 break 11115 } 11116 x1_1 := x1.Args[1] 11117 if x1_1.Op != OpARM64SRLconst { 11118 break 11119 } 11120 if x1_1.AuxInt != 16 { 11121 break 11122 } 11123 if w != x1_1.Args[0] { 11124 break 11125 } 11126 x2 := x1.Args[2] 11127 if x2.Op != OpARM64MOVBstore { 11128 break 11129 } 11130 if x2.AuxInt != i-3 { 11131 break 11132 } 11133 if x2.Aux != s { 11134 break 11135 } 11136 _ = x2.Args[2] 11137 if ptr != x2.Args[0] { 11138 break 11139 } 11140 x2_1 := x2.Args[1] 11141 if x2_1.Op != OpARM64SRLconst { 11142 break 11143 } 11144 if x2_1.AuxInt != 24 { 11145 break 11146 } 11147 if w != x2_1.Args[0] { 11148 break 11149 } 11150 mem := x2.Args[2] 11151 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 11152 break 11153 } 11154 v.reset(OpARM64MOVWstore) 11155 v.AuxInt = i - 3 11156 v.Aux = s 11157 v.AddArg(ptr) 11158 v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type) 11159 v0.AddArg(w) 11160 v.AddArg(v0) 11161 v.AddArg(mem) 11162 return true 11163 } 11164 // match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem)))) 11165 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) 11166 // result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem) 11167 for { 11168 if v.AuxInt != 3 { 11169 break 11170 } 11171 s := v.Aux 11172 _ = v.Args[2] 11173 p := v.Args[0] 11174 w := v.Args[1] 11175 x0 := v.Args[2] 11176 if x0.Op != OpARM64MOVBstore { 11177 break 11178 } 11179 if x0.AuxInt != 2 { 11180 break 11181 } 11182 if x0.Aux 
!= s { 11183 break 11184 } 11185 _ = x0.Args[2] 11186 if p != x0.Args[0] { 11187 break 11188 } 11189 x0_1 := x0.Args[1] 11190 if x0_1.Op != OpARM64SRLconst { 11191 break 11192 } 11193 if x0_1.AuxInt != 8 { 11194 break 11195 } 11196 if w != x0_1.Args[0] { 11197 break 11198 } 11199 x1 := x0.Args[2] 11200 if x1.Op != OpARM64MOVBstore { 11201 break 11202 } 11203 if x1.AuxInt != 1 { 11204 break 11205 } 11206 if x1.Aux != s { 11207 break 11208 } 11209 _ = x1.Args[2] 11210 p1 := x1.Args[0] 11211 if p1.Op != OpARM64ADD { 11212 break 11213 } 11214 _ = p1.Args[1] 11215 ptr1 := p1.Args[0] 11216 idx1 := p1.Args[1] 11217 x1_1 := x1.Args[1] 11218 if x1_1.Op != OpARM64SRLconst { 11219 break 11220 } 11221 if x1_1.AuxInt != 16 { 11222 break 11223 } 11224 if w != x1_1.Args[0] { 11225 break 11226 } 11227 x2 := x1.Args[2] 11228 if x2.Op != OpARM64MOVBstoreidx { 11229 break 11230 } 11231 _ = x2.Args[3] 11232 ptr0 := x2.Args[0] 11233 idx0 := x2.Args[1] 11234 x2_2 := x2.Args[2] 11235 if x2_2.Op != OpARM64SRLconst { 11236 break 11237 } 11238 if x2_2.AuxInt != 24 { 11239 break 11240 } 11241 if w != x2_2.Args[0] { 11242 break 11243 } 11244 mem := x2.Args[3] 11245 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) { 11246 break 11247 } 11248 v.reset(OpARM64MOVWstoreidx) 11249 v.AddArg(ptr0) 11250 v.AddArg(idx0) 11251 v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type) 11252 v0.AddArg(w) 11253 v.AddArg(v0) 11254 v.AddArg(mem) 11255 return true 11256 } 11257 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem)) 11258 // cond: x.Uses == 1 && clobber(x) 11259 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11260 for { 11261 i := v.AuxInt 11262 s := v.Aux 11263 _ = v.Args[2] 11264 ptr := v.Args[0] 11265 w := v.Args[1] 11266 x := v.Args[2] 11267 if x.Op != OpARM64MOVBstore { 11268 
break 11269 } 11270 if x.AuxInt != i-1 { 11271 break 11272 } 11273 if x.Aux != s { 11274 break 11275 } 11276 _ = x.Args[2] 11277 if ptr != x.Args[0] { 11278 break 11279 } 11280 x_1 := x.Args[1] 11281 if x_1.Op != OpARM64SRLconst { 11282 break 11283 } 11284 if x_1.AuxInt != 8 { 11285 break 11286 } 11287 if w != x_1.Args[0] { 11288 break 11289 } 11290 mem := x.Args[2] 11291 if !(x.Uses == 1 && clobber(x)) { 11292 break 11293 } 11294 v.reset(OpARM64MOVHstore) 11295 v.AuxInt = i - 1 11296 v.Aux = s 11297 v.AddArg(ptr) 11298 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 11299 v0.AddArg(w) 11300 v.AddArg(v0) 11301 v.AddArg(mem) 11302 return true 11303 } 11304 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem)) 11305 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11306 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11307 for { 11308 if v.AuxInt != 1 { 11309 break 11310 } 11311 s := v.Aux 11312 _ = v.Args[2] 11313 v_0 := v.Args[0] 11314 if v_0.Op != OpARM64ADD { 11315 break 11316 } 11317 _ = v_0.Args[1] 11318 ptr1 := v_0.Args[0] 11319 idx1 := v_0.Args[1] 11320 w := v.Args[1] 11321 x := v.Args[2] 11322 if x.Op != OpARM64MOVBstoreidx { 11323 break 11324 } 11325 _ = x.Args[3] 11326 ptr0 := x.Args[0] 11327 idx0 := x.Args[1] 11328 x_2 := x.Args[2] 11329 if x_2.Op != OpARM64SRLconst { 11330 break 11331 } 11332 if x_2.AuxInt != 8 { 11333 break 11334 } 11335 if w != x_2.Args[0] { 11336 break 11337 } 11338 mem := x.Args[3] 11339 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11340 break 11341 } 11342 v.reset(OpARM64MOVHstoreidx) 11343 v.AddArg(ptr0) 11344 v.AddArg(idx0) 11345 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11346 v0.AddArg(w) 11347 v.AddArg(v0) 11348 v.AddArg(mem) 11349 return true 11350 } 11351 // match: 
(MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 11352 // cond: x.Uses == 1 && clobber(x) 11353 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11354 for { 11355 i := v.AuxInt 11356 s := v.Aux 11357 _ = v.Args[2] 11358 ptr := v.Args[0] 11359 w := v.Args[1] 11360 x := v.Args[2] 11361 if x.Op != OpARM64MOVBstore { 11362 break 11363 } 11364 if x.AuxInt != i-1 { 11365 break 11366 } 11367 if x.Aux != s { 11368 break 11369 } 11370 _ = x.Args[2] 11371 if ptr != x.Args[0] { 11372 break 11373 } 11374 x_1 := x.Args[1] 11375 if x_1.Op != OpARM64UBFX { 11376 break 11377 } 11378 if x_1.AuxInt != arm64BFAuxInt(8, 8) { 11379 break 11380 } 11381 if w != x_1.Args[0] { 11382 break 11383 } 11384 mem := x.Args[2] 11385 if !(x.Uses == 1 && clobber(x)) { 11386 break 11387 } 11388 v.reset(OpARM64MOVHstore) 11389 v.AuxInt = i - 1 11390 v.Aux = s 11391 v.AddArg(ptr) 11392 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 11393 v0.AddArg(w) 11394 v.AddArg(v0) 11395 v.AddArg(mem) 11396 return true 11397 } 11398 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 11399 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11400 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11401 for { 11402 if v.AuxInt != 1 { 11403 break 11404 } 11405 s := v.Aux 11406 _ = v.Args[2] 11407 v_0 := v.Args[0] 11408 if v_0.Op != OpARM64ADD { 11409 break 11410 } 11411 _ = v_0.Args[1] 11412 ptr1 := v_0.Args[0] 11413 idx1 := v_0.Args[1] 11414 w := v.Args[1] 11415 x := v.Args[2] 11416 if x.Op != OpARM64MOVBstoreidx { 11417 break 11418 } 11419 _ = x.Args[3] 11420 ptr0 := x.Args[0] 11421 idx0 := x.Args[1] 11422 x_2 := x.Args[2] 11423 if x_2.Op != OpARM64UBFX { 11424 break 11425 } 11426 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 11427 break 11428 } 11429 if w != x_2.Args[0] { 11430 break 11431 } 11432 mem := 
x.Args[3] 11433 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11434 break 11435 } 11436 v.reset(OpARM64MOVHstoreidx) 11437 v.AddArg(ptr0) 11438 v.AddArg(idx0) 11439 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11440 v0.AddArg(w) 11441 v.AddArg(v0) 11442 v.AddArg(mem) 11443 return true 11444 } 11445 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 11446 // cond: x.Uses == 1 && clobber(x) 11447 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11448 for { 11449 i := v.AuxInt 11450 s := v.Aux 11451 _ = v.Args[2] 11452 ptr := v.Args[0] 11453 w := v.Args[1] 11454 x := v.Args[2] 11455 if x.Op != OpARM64MOVBstore { 11456 break 11457 } 11458 if x.AuxInt != i-1 { 11459 break 11460 } 11461 if x.Aux != s { 11462 break 11463 } 11464 _ = x.Args[2] 11465 if ptr != x.Args[0] { 11466 break 11467 } 11468 x_1 := x.Args[1] 11469 if x_1.Op != OpARM64SRLconst { 11470 break 11471 } 11472 if x_1.AuxInt != 8 { 11473 break 11474 } 11475 x_1_0 := x_1.Args[0] 11476 if x_1_0.Op != OpARM64MOVDreg { 11477 break 11478 } 11479 if w != x_1_0.Args[0] { 11480 break 11481 } 11482 mem := x.Args[2] 11483 if !(x.Uses == 1 && clobber(x)) { 11484 break 11485 } 11486 v.reset(OpARM64MOVHstore) 11487 v.AuxInt = i - 1 11488 v.Aux = s 11489 v.AddArg(ptr) 11490 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 11491 v0.AddArg(w) 11492 v.AddArg(v0) 11493 v.AddArg(mem) 11494 return true 11495 } 11496 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 11497 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11498 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11499 for { 11500 if v.AuxInt != 1 { 11501 break 11502 } 11503 s := v.Aux 11504 _ = v.Args[2] 11505 v_0 := v.Args[0] 11506 if v_0.Op != 
OpARM64ADD { 11507 break 11508 } 11509 _ = v_0.Args[1] 11510 ptr1 := v_0.Args[0] 11511 idx1 := v_0.Args[1] 11512 w := v.Args[1] 11513 x := v.Args[2] 11514 if x.Op != OpARM64MOVBstoreidx { 11515 break 11516 } 11517 _ = x.Args[3] 11518 ptr0 := x.Args[0] 11519 idx0 := x.Args[1] 11520 x_2 := x.Args[2] 11521 if x_2.Op != OpARM64SRLconst { 11522 break 11523 } 11524 if x_2.AuxInt != 8 { 11525 break 11526 } 11527 x_2_0 := x_2.Args[0] 11528 if x_2_0.Op != OpARM64MOVDreg { 11529 break 11530 } 11531 if w != x_2_0.Args[0] { 11532 break 11533 } 11534 mem := x.Args[3] 11535 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11536 break 11537 } 11538 v.reset(OpARM64MOVHstoreidx) 11539 v.AddArg(ptr0) 11540 v.AddArg(idx0) 11541 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11542 v0.AddArg(w) 11543 v.AddArg(v0) 11544 v.AddArg(mem) 11545 return true 11546 } 11547 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) mem)) 11548 // cond: x.Uses == 1 && clobber(x) 11549 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11550 for { 11551 i := v.AuxInt 11552 s := v.Aux 11553 _ = v.Args[2] 11554 ptr := v.Args[0] 11555 w := v.Args[1] 11556 x := v.Args[2] 11557 if x.Op != OpARM64MOVBstore { 11558 break 11559 } 11560 if x.AuxInt != i-1 { 11561 break 11562 } 11563 if x.Aux != s { 11564 break 11565 } 11566 _ = x.Args[2] 11567 if ptr != x.Args[0] { 11568 break 11569 } 11570 x_1 := x.Args[1] 11571 if x_1.Op != OpARM64UBFX { 11572 break 11573 } 11574 if x_1.AuxInt != arm64BFAuxInt(8, 24) { 11575 break 11576 } 11577 if w != x_1.Args[0] { 11578 break 11579 } 11580 mem := x.Args[2] 11581 if !(x.Uses == 1 && clobber(x)) { 11582 break 11583 } 11584 v.reset(OpARM64MOVHstore) 11585 v.AuxInt = i - 1 11586 v.Aux = s 11587 v.AddArg(ptr) 11588 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 11589 v0.AddArg(w) 11590 v.AddArg(v0) 11591 v.AddArg(mem) 11592 return 
true 11593 } 11594 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 24)] w) mem)) 11595 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11596 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11597 for { 11598 if v.AuxInt != 1 { 11599 break 11600 } 11601 s := v.Aux 11602 _ = v.Args[2] 11603 v_0 := v.Args[0] 11604 if v_0.Op != OpARM64ADD { 11605 break 11606 } 11607 _ = v_0.Args[1] 11608 ptr1 := v_0.Args[0] 11609 idx1 := v_0.Args[1] 11610 w := v.Args[1] 11611 x := v.Args[2] 11612 if x.Op != OpARM64MOVBstoreidx { 11613 break 11614 } 11615 _ = x.Args[3] 11616 ptr0 := x.Args[0] 11617 idx0 := x.Args[1] 11618 x_2 := x.Args[2] 11619 if x_2.Op != OpARM64UBFX { 11620 break 11621 } 11622 if x_2.AuxInt != arm64BFAuxInt(8, 24) { 11623 break 11624 } 11625 if w != x_2.Args[0] { 11626 break 11627 } 11628 mem := x.Args[3] 11629 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11630 break 11631 } 11632 v.reset(OpARM64MOVHstoreidx) 11633 v.AddArg(ptr0) 11634 v.AddArg(idx0) 11635 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11636 v0.AddArg(w) 11637 v.AddArg(v0) 11638 v.AddArg(mem) 11639 return true 11640 } 11641 return false 11642 } 11643 func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool { 11644 b := v.Block 11645 _ = b 11646 // match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem)) 11647 // cond: x.Uses == 1 && clobber(x) 11648 // result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem) 11649 for { 11650 i := v.AuxInt 11651 s := v.Aux 11652 _ = v.Args[2] 11653 ptr := v.Args[0] 11654 w := v.Args[1] 11655 x := v.Args[2] 11656 if x.Op != OpARM64MOVBstore { 11657 break 11658 } 11659 if x.AuxInt != i-1 { 11660 break 11661 } 11662 if x.Aux != s { 11663 break 11664 } 11665 _ = x.Args[2] 
11666 if ptr != x.Args[0] { 11667 break 11668 } 11669 x_1 := x.Args[1] 11670 if x_1.Op != OpARM64SRLconst { 11671 break 11672 } 11673 if x_1.AuxInt != 8 { 11674 break 11675 } 11676 x_1_0 := x_1.Args[0] 11677 if x_1_0.Op != OpARM64MOVDreg { 11678 break 11679 } 11680 if w != x_1_0.Args[0] { 11681 break 11682 } 11683 mem := x.Args[2] 11684 if !(x.Uses == 1 && clobber(x)) { 11685 break 11686 } 11687 v.reset(OpARM64MOVHstore) 11688 v.AuxInt = i - 1 11689 v.Aux = s 11690 v.AddArg(ptr) 11691 v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type) 11692 v0.AddArg(w) 11693 v.AddArg(v0) 11694 v.AddArg(mem) 11695 return true 11696 } 11697 // match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem)) 11698 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 11699 // result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem) 11700 for { 11701 if v.AuxInt != 1 { 11702 break 11703 } 11704 s := v.Aux 11705 _ = v.Args[2] 11706 v_0 := v.Args[0] 11707 if v_0.Op != OpARM64ADD { 11708 break 11709 } 11710 _ = v_0.Args[1] 11711 ptr1 := v_0.Args[0] 11712 idx1 := v_0.Args[1] 11713 w := v.Args[1] 11714 x := v.Args[2] 11715 if x.Op != OpARM64MOVBstoreidx { 11716 break 11717 } 11718 _ = x.Args[3] 11719 ptr0 := x.Args[0] 11720 idx0 := x.Args[1] 11721 x_2 := x.Args[2] 11722 if x_2.Op != OpARM64SRLconst { 11723 break 11724 } 11725 if x_2.AuxInt != 8 { 11726 break 11727 } 11728 x_2_0 := x_2.Args[0] 11729 if x_2_0.Op != OpARM64MOVDreg { 11730 break 11731 } 11732 if w != x_2_0.Args[0] { 11733 break 11734 } 11735 mem := x.Args[3] 11736 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 11737 break 11738 } 11739 v.reset(OpARM64MOVHstoreidx) 11740 v.AddArg(ptr0) 11741 v.AddArg(idx0) 11742 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 11743 v0.AddArg(w) 11744 v.AddArg(v0) 
11745 v.AddArg(mem) 11746 return true 11747 } 11748 return false 11749 } 11750 func rewriteValueARM64_OpARM64MOVBstoreidx_0(v *Value) bool { 11751 // match: (MOVBstoreidx ptr (MOVDconst [c]) val mem) 11752 // cond: 11753 // result: (MOVBstore [c] ptr val mem) 11754 for { 11755 _ = v.Args[3] 11756 ptr := v.Args[0] 11757 v_1 := v.Args[1] 11758 if v_1.Op != OpARM64MOVDconst { 11759 break 11760 } 11761 c := v_1.AuxInt 11762 val := v.Args[2] 11763 mem := v.Args[3] 11764 v.reset(OpARM64MOVBstore) 11765 v.AuxInt = c 11766 v.AddArg(ptr) 11767 v.AddArg(val) 11768 v.AddArg(mem) 11769 return true 11770 } 11771 // match: (MOVBstoreidx (MOVDconst [c]) idx val mem) 11772 // cond: 11773 // result: (MOVBstore [c] idx val mem) 11774 for { 11775 _ = v.Args[3] 11776 v_0 := v.Args[0] 11777 if v_0.Op != OpARM64MOVDconst { 11778 break 11779 } 11780 c := v_0.AuxInt 11781 idx := v.Args[1] 11782 val := v.Args[2] 11783 mem := v.Args[3] 11784 v.reset(OpARM64MOVBstore) 11785 v.AuxInt = c 11786 v.AddArg(idx) 11787 v.AddArg(val) 11788 v.AddArg(mem) 11789 return true 11790 } 11791 // match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem) 11792 // cond: 11793 // result: (MOVBstorezeroidx ptr idx mem) 11794 for { 11795 _ = v.Args[3] 11796 ptr := v.Args[0] 11797 idx := v.Args[1] 11798 v_2 := v.Args[2] 11799 if v_2.Op != OpARM64MOVDconst { 11800 break 11801 } 11802 if v_2.AuxInt != 0 { 11803 break 11804 } 11805 mem := v.Args[3] 11806 v.reset(OpARM64MOVBstorezeroidx) 11807 v.AddArg(ptr) 11808 v.AddArg(idx) 11809 v.AddArg(mem) 11810 return true 11811 } 11812 // match: (MOVBstoreidx ptr idx (MOVBreg x) mem) 11813 // cond: 11814 // result: (MOVBstoreidx ptr idx x mem) 11815 for { 11816 _ = v.Args[3] 11817 ptr := v.Args[0] 11818 idx := v.Args[1] 11819 v_2 := v.Args[2] 11820 if v_2.Op != OpARM64MOVBreg { 11821 break 11822 } 11823 x := v_2.Args[0] 11824 mem := v.Args[3] 11825 v.reset(OpARM64MOVBstoreidx) 11826 v.AddArg(ptr) 11827 v.AddArg(idx) 11828 v.AddArg(x) 11829 v.AddArg(mem) 11830 return true 11831 } 
11832 // match: (MOVBstoreidx ptr idx (MOVBUreg x) mem) 11833 // cond: 11834 // result: (MOVBstoreidx ptr idx x mem) 11835 for { 11836 _ = v.Args[3] 11837 ptr := v.Args[0] 11838 idx := v.Args[1] 11839 v_2 := v.Args[2] 11840 if v_2.Op != OpARM64MOVBUreg { 11841 break 11842 } 11843 x := v_2.Args[0] 11844 mem := v.Args[3] 11845 v.reset(OpARM64MOVBstoreidx) 11846 v.AddArg(ptr) 11847 v.AddArg(idx) 11848 v.AddArg(x) 11849 v.AddArg(mem) 11850 return true 11851 } 11852 // match: (MOVBstoreidx ptr idx (MOVHreg x) mem) 11853 // cond: 11854 // result: (MOVBstoreidx ptr idx x mem) 11855 for { 11856 _ = v.Args[3] 11857 ptr := v.Args[0] 11858 idx := v.Args[1] 11859 v_2 := v.Args[2] 11860 if v_2.Op != OpARM64MOVHreg { 11861 break 11862 } 11863 x := v_2.Args[0] 11864 mem := v.Args[3] 11865 v.reset(OpARM64MOVBstoreidx) 11866 v.AddArg(ptr) 11867 v.AddArg(idx) 11868 v.AddArg(x) 11869 v.AddArg(mem) 11870 return true 11871 } 11872 // match: (MOVBstoreidx ptr idx (MOVHUreg x) mem) 11873 // cond: 11874 // result: (MOVBstoreidx ptr idx x mem) 11875 for { 11876 _ = v.Args[3] 11877 ptr := v.Args[0] 11878 idx := v.Args[1] 11879 v_2 := v.Args[2] 11880 if v_2.Op != OpARM64MOVHUreg { 11881 break 11882 } 11883 x := v_2.Args[0] 11884 mem := v.Args[3] 11885 v.reset(OpARM64MOVBstoreidx) 11886 v.AddArg(ptr) 11887 v.AddArg(idx) 11888 v.AddArg(x) 11889 v.AddArg(mem) 11890 return true 11891 } 11892 // match: (MOVBstoreidx ptr idx (MOVWreg x) mem) 11893 // cond: 11894 // result: (MOVBstoreidx ptr idx x mem) 11895 for { 11896 _ = v.Args[3] 11897 ptr := v.Args[0] 11898 idx := v.Args[1] 11899 v_2 := v.Args[2] 11900 if v_2.Op != OpARM64MOVWreg { 11901 break 11902 } 11903 x := v_2.Args[0] 11904 mem := v.Args[3] 11905 v.reset(OpARM64MOVBstoreidx) 11906 v.AddArg(ptr) 11907 v.AddArg(idx) 11908 v.AddArg(x) 11909 v.AddArg(mem) 11910 return true 11911 } 11912 // match: (MOVBstoreidx ptr idx (MOVWUreg x) mem) 11913 // cond: 11914 // result: (MOVBstoreidx ptr idx x mem) 11915 for { 11916 _ = v.Args[3] 11917 ptr := 
v.Args[0] 11918 idx := v.Args[1] 11919 v_2 := v.Args[2] 11920 if v_2.Op != OpARM64MOVWUreg { 11921 break 11922 } 11923 x := v_2.Args[0] 11924 mem := v.Args[3] 11925 v.reset(OpARM64MOVBstoreidx) 11926 v.AddArg(ptr) 11927 v.AddArg(idx) 11928 v.AddArg(x) 11929 v.AddArg(mem) 11930 return true 11931 } 11932 // match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem)) 11933 // cond: x.Uses == 1 && clobber(x) 11934 // result: (MOVHstoreidx ptr idx w mem) 11935 for { 11936 _ = v.Args[3] 11937 ptr := v.Args[0] 11938 v_1 := v.Args[1] 11939 if v_1.Op != OpARM64ADDconst { 11940 break 11941 } 11942 if v_1.AuxInt != 1 { 11943 break 11944 } 11945 idx := v_1.Args[0] 11946 v_2 := v.Args[2] 11947 if v_2.Op != OpARM64SRLconst { 11948 break 11949 } 11950 if v_2.AuxInt != 8 { 11951 break 11952 } 11953 w := v_2.Args[0] 11954 x := v.Args[3] 11955 if x.Op != OpARM64MOVBstoreidx { 11956 break 11957 } 11958 _ = x.Args[3] 11959 if ptr != x.Args[0] { 11960 break 11961 } 11962 if idx != x.Args[1] { 11963 break 11964 } 11965 if w != x.Args[2] { 11966 break 11967 } 11968 mem := x.Args[3] 11969 if !(x.Uses == 1 && clobber(x)) { 11970 break 11971 } 11972 v.reset(OpARM64MOVHstoreidx) 11973 v.AddArg(ptr) 11974 v.AddArg(idx) 11975 v.AddArg(w) 11976 v.AddArg(mem) 11977 return true 11978 } 11979 return false 11980 } 11981 func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool { 11982 b := v.Block 11983 _ = b 11984 // match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 11985 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 11986 // result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem) 11987 for { 11988 _ = v.Args[3] 11989 ptr := v.Args[0] 11990 v_1 := v.Args[1] 11991 if v_1.Op != OpARM64ADDconst { 11992 break 11993 
} 11994 if v_1.AuxInt != 3 { 11995 break 11996 } 11997 idx := v_1.Args[0] 11998 w := v.Args[2] 11999 x0 := v.Args[3] 12000 if x0.Op != OpARM64MOVBstoreidx { 12001 break 12002 } 12003 _ = x0.Args[3] 12004 if ptr != x0.Args[0] { 12005 break 12006 } 12007 x0_1 := x0.Args[1] 12008 if x0_1.Op != OpARM64ADDconst { 12009 break 12010 } 12011 if x0_1.AuxInt != 2 { 12012 break 12013 } 12014 if idx != x0_1.Args[0] { 12015 break 12016 } 12017 x0_2 := x0.Args[2] 12018 if x0_2.Op != OpARM64UBFX { 12019 break 12020 } 12021 if x0_2.AuxInt != arm64BFAuxInt(8, 24) { 12022 break 12023 } 12024 if w != x0_2.Args[0] { 12025 break 12026 } 12027 x1 := x0.Args[3] 12028 if x1.Op != OpARM64MOVBstoreidx { 12029 break 12030 } 12031 _ = x1.Args[3] 12032 if ptr != x1.Args[0] { 12033 break 12034 } 12035 x1_1 := x1.Args[1] 12036 if x1_1.Op != OpARM64ADDconst { 12037 break 12038 } 12039 if x1_1.AuxInt != 1 { 12040 break 12041 } 12042 if idx != x1_1.Args[0] { 12043 break 12044 } 12045 x1_2 := x1.Args[2] 12046 if x1_2.Op != OpARM64UBFX { 12047 break 12048 } 12049 if x1_2.AuxInt != arm64BFAuxInt(16, 16) { 12050 break 12051 } 12052 if w != x1_2.Args[0] { 12053 break 12054 } 12055 x2 := x1.Args[3] 12056 if x2.Op != OpARM64MOVBstoreidx { 12057 break 12058 } 12059 _ = x2.Args[3] 12060 if ptr != x2.Args[0] { 12061 break 12062 } 12063 if idx != x2.Args[1] { 12064 break 12065 } 12066 x2_2 := x2.Args[2] 12067 if x2_2.Op != OpARM64UBFX { 12068 break 12069 } 12070 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 12071 break 12072 } 12073 if w != x2_2.Args[0] { 12074 break 12075 } 12076 mem := x2.Args[3] 12077 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 12078 break 12079 } 12080 v.reset(OpARM64MOVWstoreidx) 12081 v.AddArg(ptr) 12082 v.AddArg(idx) 12083 v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type) 12084 v0.AddArg(w) 12085 v.AddArg(v0) 12086 v.AddArg(mem) 12087 return true 12088 } 12089 // match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX 
[arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [arm64BFAuxInt(24, 8)] w) mem)))) 12090 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) 12091 // result: (MOVWstoreidx ptr idx w mem) 12092 for { 12093 _ = v.Args[3] 12094 ptr := v.Args[0] 12095 idx := v.Args[1] 12096 w := v.Args[2] 12097 x0 := v.Args[3] 12098 if x0.Op != OpARM64MOVBstoreidx { 12099 break 12100 } 12101 _ = x0.Args[3] 12102 if ptr != x0.Args[0] { 12103 break 12104 } 12105 x0_1 := x0.Args[1] 12106 if x0_1.Op != OpARM64ADDconst { 12107 break 12108 } 12109 if x0_1.AuxInt != 1 { 12110 break 12111 } 12112 if idx != x0_1.Args[0] { 12113 break 12114 } 12115 x0_2 := x0.Args[2] 12116 if x0_2.Op != OpARM64UBFX { 12117 break 12118 } 12119 if x0_2.AuxInt != arm64BFAuxInt(8, 24) { 12120 break 12121 } 12122 if w != x0_2.Args[0] { 12123 break 12124 } 12125 x1 := x0.Args[3] 12126 if x1.Op != OpARM64MOVBstoreidx { 12127 break 12128 } 12129 _ = x1.Args[3] 12130 if ptr != x1.Args[0] { 12131 break 12132 } 12133 x1_1 := x1.Args[1] 12134 if x1_1.Op != OpARM64ADDconst { 12135 break 12136 } 12137 if x1_1.AuxInt != 2 { 12138 break 12139 } 12140 if idx != x1_1.Args[0] { 12141 break 12142 } 12143 x1_2 := x1.Args[2] 12144 if x1_2.Op != OpARM64UBFX { 12145 break 12146 } 12147 if x1_2.AuxInt != arm64BFAuxInt(16, 16) { 12148 break 12149 } 12150 if w != x1_2.Args[0] { 12151 break 12152 } 12153 x2 := x1.Args[3] 12154 if x2.Op != OpARM64MOVBstoreidx { 12155 break 12156 } 12157 _ = x2.Args[3] 12158 if ptr != x2.Args[0] { 12159 break 12160 } 12161 x2_1 := x2.Args[1] 12162 if x2_1.Op != OpARM64ADDconst { 12163 break 12164 } 12165 if x2_1.AuxInt != 3 { 12166 break 12167 } 12168 if idx != x2_1.Args[0] { 12169 break 12170 } 12171 x2_2 := x2.Args[2] 12172 if x2_2.Op != OpARM64UBFX { 12173 break 12174 } 12175 if x2_2.AuxInt != arm64BFAuxInt(24, 8) { 12176 break 12177 } 12178 if w != x2_2.Args[0] { 
12179 break 12180 } 12181 mem := x2.Args[3] 12182 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) { 12183 break 12184 } 12185 v.reset(OpARM64MOVWstoreidx) 12186 v.AddArg(ptr) 12187 v.AddArg(idx) 12188 v.AddArg(w) 12189 v.AddArg(mem) 12190 return true 12191 } 12192 // match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 12193 // cond: x.Uses == 1 && clobber(x) 12194 // result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem) 12195 for { 12196 _ = v.Args[3] 12197 ptr := v.Args[0] 12198 v_1 := v.Args[1] 12199 if v_1.Op != OpARM64ADDconst { 12200 break 12201 } 12202 if v_1.AuxInt != 1 { 12203 break 12204 } 12205 idx := v_1.Args[0] 12206 w := v.Args[2] 12207 x := v.Args[3] 12208 if x.Op != OpARM64MOVBstoreidx { 12209 break 12210 } 12211 _ = x.Args[3] 12212 if ptr != x.Args[0] { 12213 break 12214 } 12215 if idx != x.Args[1] { 12216 break 12217 } 12218 x_2 := x.Args[2] 12219 if x_2.Op != OpARM64UBFX { 12220 break 12221 } 12222 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 12223 break 12224 } 12225 if w != x_2.Args[0] { 12226 break 12227 } 12228 mem := x.Args[3] 12229 if !(x.Uses == 1 && clobber(x)) { 12230 break 12231 } 12232 v.reset(OpARM64MOVHstoreidx) 12233 v.AddArg(ptr) 12234 v.AddArg(idx) 12235 v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type) 12236 v0.AddArg(w) 12237 v.AddArg(v0) 12238 v.AddArg(mem) 12239 return true 12240 } 12241 // match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(8, 8)] w) mem)) 12242 // cond: x.Uses == 1 && clobber(x) 12243 // result: (MOVHstoreidx ptr idx w mem) 12244 for { 12245 _ = v.Args[3] 12246 ptr := v.Args[0] 12247 idx := v.Args[1] 12248 w := v.Args[2] 12249 x := v.Args[3] 12250 if x.Op != OpARM64MOVBstoreidx { 12251 break 12252 } 12253 _ = x.Args[3] 12254 if ptr != x.Args[0] { 12255 break 12256 } 12257 x_1 := x.Args[1] 12258 if x_1.Op != OpARM64ADDconst { 12259 break 12260 } 12261 if x_1.AuxInt != 1 { 12262 
break 12263 } 12264 if idx != x_1.Args[0] { 12265 break 12266 } 12267 x_2 := x.Args[2] 12268 if x_2.Op != OpARM64UBFX { 12269 break 12270 } 12271 if x_2.AuxInt != arm64BFAuxInt(8, 8) { 12272 break 12273 } 12274 if w != x_2.Args[0] { 12275 break 12276 } 12277 mem := x.Args[3] 12278 if !(x.Uses == 1 && clobber(x)) { 12279 break 12280 } 12281 v.reset(OpARM64MOVHstoreidx) 12282 v.AddArg(ptr) 12283 v.AddArg(idx) 12284 v.AddArg(w) 12285 v.AddArg(mem) 12286 return true 12287 } 12288 return false 12289 } 12290 func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool { 12291 b := v.Block 12292 _ = b 12293 config := b.Func.Config 12294 _ = config 12295 // match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 12296 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12297 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 12298 for { 12299 off1 := v.AuxInt 12300 sym := v.Aux 12301 _ = v.Args[1] 12302 v_0 := v.Args[0] 12303 if v_0.Op != OpARM64ADDconst { 12304 break 12305 } 12306 off2 := v_0.AuxInt 12307 ptr := v_0.Args[0] 12308 mem := v.Args[1] 12309 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12310 break 12311 } 12312 v.reset(OpARM64MOVBstorezero) 12313 v.AuxInt = off1 + off2 12314 v.Aux = sym 12315 v.AddArg(ptr) 12316 v.AddArg(mem) 12317 return true 12318 } 12319 // match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12320 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12321 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12322 for { 12323 off1 := v.AuxInt 12324 sym1 := v.Aux 12325 _ = v.Args[1] 12326 v_0 := v.Args[0] 12327 if v_0.Op != OpARM64MOVDaddr { 12328 break 12329 } 12330 off2 := v_0.AuxInt 12331 sym2 := v_0.Aux 12332 ptr := v_0.Args[0] 12333 mem := v.Args[1] 12334 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12335 break 12336 } 12337 
v.reset(OpARM64MOVBstorezero) 12338 v.AuxInt = off1 + off2 12339 v.Aux = mergeSym(sym1, sym2) 12340 v.AddArg(ptr) 12341 v.AddArg(mem) 12342 return true 12343 } 12344 // match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem) 12345 // cond: off == 0 && sym == nil 12346 // result: (MOVBstorezeroidx ptr idx mem) 12347 for { 12348 off := v.AuxInt 12349 sym := v.Aux 12350 _ = v.Args[1] 12351 v_0 := v.Args[0] 12352 if v_0.Op != OpARM64ADD { 12353 break 12354 } 12355 _ = v_0.Args[1] 12356 ptr := v_0.Args[0] 12357 idx := v_0.Args[1] 12358 mem := v.Args[1] 12359 if !(off == 0 && sym == nil) { 12360 break 12361 } 12362 v.reset(OpARM64MOVBstorezeroidx) 12363 v.AddArg(ptr) 12364 v.AddArg(idx) 12365 v.AddArg(mem) 12366 return true 12367 } 12368 // match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem)) 12369 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,1) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 12370 // result: (MOVHstorezero [min(i,j)] {s} ptr0 mem) 12371 for { 12372 i := v.AuxInt 12373 s := v.Aux 12374 _ = v.Args[1] 12375 ptr0 := v.Args[0] 12376 x := v.Args[1] 12377 if x.Op != OpARM64MOVBstorezero { 12378 break 12379 } 12380 j := x.AuxInt 12381 if x.Aux != s { 12382 break 12383 } 12384 _ = x.Args[1] 12385 ptr1 := x.Args[0] 12386 mem := x.Args[1] 12387 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 12388 break 12389 } 12390 v.reset(OpARM64MOVHstorezero) 12391 v.AuxInt = min(i, j) 12392 v.Aux = s 12393 v.AddArg(ptr0) 12394 v.AddArg(mem) 12395 return true 12396 } 12397 // match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem)) 12398 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 12399 // result: (MOVHstorezeroidx ptr1 idx1 mem) 12400 for { 12401 if v.AuxInt != 1 { 12402 break 12403 } 12404 s := v.Aux 12405 _ = v.Args[1] 12406 v_0 := v.Args[0] 12407 if 
v_0.Op != OpARM64ADD { 12408 break 12409 } 12410 _ = v_0.Args[1] 12411 ptr0 := v_0.Args[0] 12412 idx0 := v_0.Args[1] 12413 x := v.Args[1] 12414 if x.Op != OpARM64MOVBstorezeroidx { 12415 break 12416 } 12417 _ = x.Args[2] 12418 ptr1 := x.Args[0] 12419 idx1 := x.Args[1] 12420 mem := x.Args[2] 12421 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 12422 break 12423 } 12424 v.reset(OpARM64MOVHstorezeroidx) 12425 v.AddArg(ptr1) 12426 v.AddArg(idx1) 12427 v.AddArg(mem) 12428 return true 12429 } 12430 return false 12431 } 12432 func rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v *Value) bool { 12433 // match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem) 12434 // cond: 12435 // result: (MOVBstorezero [c] ptr mem) 12436 for { 12437 _ = v.Args[2] 12438 ptr := v.Args[0] 12439 v_1 := v.Args[1] 12440 if v_1.Op != OpARM64MOVDconst { 12441 break 12442 } 12443 c := v_1.AuxInt 12444 mem := v.Args[2] 12445 v.reset(OpARM64MOVBstorezero) 12446 v.AuxInt = c 12447 v.AddArg(ptr) 12448 v.AddArg(mem) 12449 return true 12450 } 12451 // match: (MOVBstorezeroidx (MOVDconst [c]) idx mem) 12452 // cond: 12453 // result: (MOVBstorezero [c] idx mem) 12454 for { 12455 _ = v.Args[2] 12456 v_0 := v.Args[0] 12457 if v_0.Op != OpARM64MOVDconst { 12458 break 12459 } 12460 c := v_0.AuxInt 12461 idx := v.Args[1] 12462 mem := v.Args[2] 12463 v.reset(OpARM64MOVBstorezero) 12464 v.AuxInt = c 12465 v.AddArg(idx) 12466 v.AddArg(mem) 12467 return true 12468 } 12469 // match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem)) 12470 // cond: x.Uses == 1 && clobber(x) 12471 // result: (MOVHstorezeroidx ptr idx mem) 12472 for { 12473 _ = v.Args[2] 12474 ptr := v.Args[0] 12475 v_1 := v.Args[1] 12476 if v_1.Op != OpARM64ADDconst { 12477 break 12478 } 12479 if v_1.AuxInt != 1 { 12480 break 12481 } 12482 idx := v_1.Args[0] 12483 x := v.Args[2] 12484 if x.Op != OpARM64MOVBstorezeroidx { 12485 break 
12486 } 12487 _ = x.Args[2] 12488 if ptr != x.Args[0] { 12489 break 12490 } 12491 if idx != x.Args[1] { 12492 break 12493 } 12494 mem := x.Args[2] 12495 if !(x.Uses == 1 && clobber(x)) { 12496 break 12497 } 12498 v.reset(OpARM64MOVHstorezeroidx) 12499 v.AddArg(ptr) 12500 v.AddArg(idx) 12501 v.AddArg(mem) 12502 return true 12503 } 12504 return false 12505 } 12506 func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool { 12507 b := v.Block 12508 _ = b 12509 config := b.Func.Config 12510 _ = config 12511 // match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _)) 12512 // cond: 12513 // result: (FMOVDfpgp val) 12514 for { 12515 off := v.AuxInt 12516 sym := v.Aux 12517 _ = v.Args[1] 12518 ptr := v.Args[0] 12519 v_1 := v.Args[1] 12520 if v_1.Op != OpARM64FMOVDstore { 12521 break 12522 } 12523 if v_1.AuxInt != off { 12524 break 12525 } 12526 if v_1.Aux != sym { 12527 break 12528 } 12529 _ = v_1.Args[2] 12530 if ptr != v_1.Args[0] { 12531 break 12532 } 12533 val := v_1.Args[1] 12534 v.reset(OpARM64FMOVDfpgp) 12535 v.AddArg(val) 12536 return true 12537 } 12538 // match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem) 12539 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12540 // result: (MOVDload [off1+off2] {sym} ptr mem) 12541 for { 12542 off1 := v.AuxInt 12543 sym := v.Aux 12544 _ = v.Args[1] 12545 v_0 := v.Args[0] 12546 if v_0.Op != OpARM64ADDconst { 12547 break 12548 } 12549 off2 := v_0.AuxInt 12550 ptr := v_0.Args[0] 12551 mem := v.Args[1] 12552 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12553 break 12554 } 12555 v.reset(OpARM64MOVDload) 12556 v.AuxInt = off1 + off2 12557 v.Aux = sym 12558 v.AddArg(ptr) 12559 v.AddArg(mem) 12560 return true 12561 } 12562 // match: (MOVDload [off] {sym} (ADD ptr idx) mem) 12563 // cond: off == 0 && sym == nil 12564 // result: (MOVDloadidx ptr idx mem) 12565 for { 12566 off := v.AuxInt 12567 sym := v.Aux 12568 _ = v.Args[1] 12569 v_0 := v.Args[0] 12570 if v_0.Op 
!= OpARM64ADD { 12571 break 12572 } 12573 _ = v_0.Args[1] 12574 ptr := v_0.Args[0] 12575 idx := v_0.Args[1] 12576 mem := v.Args[1] 12577 if !(off == 0 && sym == nil) { 12578 break 12579 } 12580 v.reset(OpARM64MOVDloadidx) 12581 v.AddArg(ptr) 12582 v.AddArg(idx) 12583 v.AddArg(mem) 12584 return true 12585 } 12586 // match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem) 12587 // cond: off == 0 && sym == nil 12588 // result: (MOVDloadidx8 ptr idx mem) 12589 for { 12590 off := v.AuxInt 12591 sym := v.Aux 12592 _ = v.Args[1] 12593 v_0 := v.Args[0] 12594 if v_0.Op != OpARM64ADDshiftLL { 12595 break 12596 } 12597 if v_0.AuxInt != 3 { 12598 break 12599 } 12600 _ = v_0.Args[1] 12601 ptr := v_0.Args[0] 12602 idx := v_0.Args[1] 12603 mem := v.Args[1] 12604 if !(off == 0 && sym == nil) { 12605 break 12606 } 12607 v.reset(OpARM64MOVDloadidx8) 12608 v.AddArg(ptr) 12609 v.AddArg(idx) 12610 v.AddArg(mem) 12611 return true 12612 } 12613 // match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 12614 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12615 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 12616 for { 12617 off1 := v.AuxInt 12618 sym1 := v.Aux 12619 _ = v.Args[1] 12620 v_0 := v.Args[0] 12621 if v_0.Op != OpARM64MOVDaddr { 12622 break 12623 } 12624 off2 := v_0.AuxInt 12625 sym2 := v_0.Aux 12626 ptr := v_0.Args[0] 12627 mem := v.Args[1] 12628 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12629 break 12630 } 12631 v.reset(OpARM64MOVDload) 12632 v.AuxInt = off1 + off2 12633 v.Aux = mergeSym(sym1, sym2) 12634 v.AddArg(ptr) 12635 v.AddArg(mem) 12636 return true 12637 } 12638 // match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _)) 12639 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 12640 // result: (MOVDconst [0]) 12641 for { 12642 off := v.AuxInt 12643 sym := v.Aux 12644 _ = v.Args[1] 12645 ptr := v.Args[0] 
12646 v_1 := v.Args[1] 12647 if v_1.Op != OpARM64MOVDstorezero { 12648 break 12649 } 12650 off2 := v_1.AuxInt 12651 sym2 := v_1.Aux 12652 _ = v_1.Args[1] 12653 ptr2 := v_1.Args[0] 12654 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 12655 break 12656 } 12657 v.reset(OpARM64MOVDconst) 12658 v.AuxInt = 0 12659 return true 12660 } 12661 // match: (MOVDload [off] {sym} (SB) _) 12662 // cond: symIsRO(sym) 12663 // result: (MOVDconst [int64(read64(sym, off, config.BigEndian))]) 12664 for { 12665 off := v.AuxInt 12666 sym := v.Aux 12667 _ = v.Args[1] 12668 v_0 := v.Args[0] 12669 if v_0.Op != OpSB { 12670 break 12671 } 12672 if !(symIsRO(sym)) { 12673 break 12674 } 12675 v.reset(OpARM64MOVDconst) 12676 v.AuxInt = int64(read64(sym, off, config.BigEndian)) 12677 return true 12678 } 12679 return false 12680 } 12681 func rewriteValueARM64_OpARM64MOVDloadidx_0(v *Value) bool { 12682 // match: (MOVDloadidx ptr (MOVDconst [c]) mem) 12683 // cond: 12684 // result: (MOVDload [c] ptr mem) 12685 for { 12686 _ = v.Args[2] 12687 ptr := v.Args[0] 12688 v_1 := v.Args[1] 12689 if v_1.Op != OpARM64MOVDconst { 12690 break 12691 } 12692 c := v_1.AuxInt 12693 mem := v.Args[2] 12694 v.reset(OpARM64MOVDload) 12695 v.AuxInt = c 12696 v.AddArg(ptr) 12697 v.AddArg(mem) 12698 return true 12699 } 12700 // match: (MOVDloadidx (MOVDconst [c]) ptr mem) 12701 // cond: 12702 // result: (MOVDload [c] ptr mem) 12703 for { 12704 _ = v.Args[2] 12705 v_0 := v.Args[0] 12706 if v_0.Op != OpARM64MOVDconst { 12707 break 12708 } 12709 c := v_0.AuxInt 12710 ptr := v.Args[1] 12711 mem := v.Args[2] 12712 v.reset(OpARM64MOVDload) 12713 v.AuxInt = c 12714 v.AddArg(ptr) 12715 v.AddArg(mem) 12716 return true 12717 } 12718 // match: (MOVDloadidx ptr (SLLconst [3] idx) mem) 12719 // cond: 12720 // result: (MOVDloadidx8 ptr idx mem) 12721 for { 12722 _ = v.Args[2] 12723 ptr := v.Args[0] 12724 v_1 := v.Args[1] 12725 if v_1.Op != OpARM64SLLconst { 12726 break 12727 } 12728 if v_1.AuxInt != 3 { 12729 break 12730 } 
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDloadidx8_0 applies the generated rewrite
// rules for OpARM64MOVDloadidx8 values (from gen/ARM64.rules); it mutates
// v in place and reports whether a rule fired.
func rewriteValueARM64_OpARM64MOVDloadidx8_0(v *Value) bool {
	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDload [c<<3] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDreg_0 applies the generated rewrite rules
// for OpARM64MOVDreg values; it mutates v in place and reports whether a
// rule fired.
func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstore_0 applies the generated rewrite rules
// for OpARM64MOVDstore values; it mutates v in place and reports whether a
// rule fired.
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
	// cond:
	// result: (FMOVDstore [off] {sym} ptr val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVDfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op !=
OpARM64ADDconst { 12889 break 12890 } 12891 off2 := v_0.AuxInt 12892 ptr := v_0.Args[0] 12893 val := v.Args[1] 12894 mem := v.Args[2] 12895 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12896 break 12897 } 12898 v.reset(OpARM64MOVDstore) 12899 v.AuxInt = off1 + off2 12900 v.Aux = sym 12901 v.AddArg(ptr) 12902 v.AddArg(val) 12903 v.AddArg(mem) 12904 return true 12905 } 12906 // match: (MOVDstore [off] {sym} (ADD ptr idx) val mem) 12907 // cond: off == 0 && sym == nil 12908 // result: (MOVDstoreidx ptr idx val mem) 12909 for { 12910 off := v.AuxInt 12911 sym := v.Aux 12912 _ = v.Args[2] 12913 v_0 := v.Args[0] 12914 if v_0.Op != OpARM64ADD { 12915 break 12916 } 12917 _ = v_0.Args[1] 12918 ptr := v_0.Args[0] 12919 idx := v_0.Args[1] 12920 val := v.Args[1] 12921 mem := v.Args[2] 12922 if !(off == 0 && sym == nil) { 12923 break 12924 } 12925 v.reset(OpARM64MOVDstoreidx) 12926 v.AddArg(ptr) 12927 v.AddArg(idx) 12928 v.AddArg(val) 12929 v.AddArg(mem) 12930 return true 12931 } 12932 // match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem) 12933 // cond: off == 0 && sym == nil 12934 // result: (MOVDstoreidx8 ptr idx val mem) 12935 for { 12936 off := v.AuxInt 12937 sym := v.Aux 12938 _ = v.Args[2] 12939 v_0 := v.Args[0] 12940 if v_0.Op != OpARM64ADDshiftLL { 12941 break 12942 } 12943 if v_0.AuxInt != 3 { 12944 break 12945 } 12946 _ = v_0.Args[1] 12947 ptr := v_0.Args[0] 12948 idx := v_0.Args[1] 12949 val := v.Args[1] 12950 mem := v.Args[2] 12951 if !(off == 0 && sym == nil) { 12952 break 12953 } 12954 v.reset(OpARM64MOVDstoreidx8) 12955 v.AddArg(ptr) 12956 v.AddArg(idx) 12957 v.AddArg(val) 12958 v.AddArg(mem) 12959 return true 12960 } 12961 // match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 12962 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 12963 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 12964 for { 12965 off1 := v.AuxInt 12966 sym1 
:= v.Aux 12967 _ = v.Args[2] 12968 v_0 := v.Args[0] 12969 if v_0.Op != OpARM64MOVDaddr { 12970 break 12971 } 12972 off2 := v_0.AuxInt 12973 sym2 := v_0.Aux 12974 ptr := v_0.Args[0] 12975 val := v.Args[1] 12976 mem := v.Args[2] 12977 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 12978 break 12979 } 12980 v.reset(OpARM64MOVDstore) 12981 v.AuxInt = off1 + off2 12982 v.Aux = mergeSym(sym1, sym2) 12983 v.AddArg(ptr) 12984 v.AddArg(val) 12985 v.AddArg(mem) 12986 return true 12987 } 12988 // match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem) 12989 // cond: 12990 // result: (MOVDstorezero [off] {sym} ptr mem) 12991 for { 12992 off := v.AuxInt 12993 sym := v.Aux 12994 _ = v.Args[2] 12995 ptr := v.Args[0] 12996 v_1 := v.Args[1] 12997 if v_1.Op != OpARM64MOVDconst { 12998 break 12999 } 13000 if v_1.AuxInt != 0 { 13001 break 13002 } 13003 mem := v.Args[2] 13004 v.reset(OpARM64MOVDstorezero) 13005 v.AuxInt = off 13006 v.Aux = sym 13007 v.AddArg(ptr) 13008 v.AddArg(mem) 13009 return true 13010 } 13011 return false 13012 } 13013 func rewriteValueARM64_OpARM64MOVDstoreidx_0(v *Value) bool { 13014 // match: (MOVDstoreidx ptr (MOVDconst [c]) val mem) 13015 // cond: 13016 // result: (MOVDstore [c] ptr val mem) 13017 for { 13018 _ = v.Args[3] 13019 ptr := v.Args[0] 13020 v_1 := v.Args[1] 13021 if v_1.Op != OpARM64MOVDconst { 13022 break 13023 } 13024 c := v_1.AuxInt 13025 val := v.Args[2] 13026 mem := v.Args[3] 13027 v.reset(OpARM64MOVDstore) 13028 v.AuxInt = c 13029 v.AddArg(ptr) 13030 v.AddArg(val) 13031 v.AddArg(mem) 13032 return true 13033 } 13034 // match: (MOVDstoreidx (MOVDconst [c]) idx val mem) 13035 // cond: 13036 // result: (MOVDstore [c] idx val mem) 13037 for { 13038 _ = v.Args[3] 13039 v_0 := v.Args[0] 13040 if v_0.Op != OpARM64MOVDconst { 13041 break 13042 } 13043 c := v_0.AuxInt 13044 idx := v.Args[1] 13045 val := v.Args[2] 13046 mem := v.Args[3] 13047 v.reset(OpARM64MOVDstore) 13048 v.AuxInt = c 13049 
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// cond:
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// cond:
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstoreidx8_0 applies the generated rewrite
// rules for OpARM64MOVDstoreidx8 values (from gen/ARM64.rules); it mutates
// v in place and reports whether a rule fired.
func rewriteValueARM64_OpARM64MOVDstoreidx8_0(v *Value) bool {
	// match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVDstore [c<<3] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezero_0 applies the generated rewrite
// rules for OpARM64MOVDstorezero values; it mutates v in place and reports
// whether a rule fired.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
13210 mem := v.Args[1] 13211 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13212 break 13213 } 13214 v.reset(OpARM64MOVDstorezero) 13215 v.AuxInt = off1 + off2 13216 v.Aux = mergeSym(sym1, sym2) 13217 v.AddArg(ptr) 13218 v.AddArg(mem) 13219 return true 13220 } 13221 // match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem) 13222 // cond: off == 0 && sym == nil 13223 // result: (MOVDstorezeroidx ptr idx mem) 13224 for { 13225 off := v.AuxInt 13226 sym := v.Aux 13227 _ = v.Args[1] 13228 v_0 := v.Args[0] 13229 if v_0.Op != OpARM64ADD { 13230 break 13231 } 13232 _ = v_0.Args[1] 13233 ptr := v_0.Args[0] 13234 idx := v_0.Args[1] 13235 mem := v.Args[1] 13236 if !(off == 0 && sym == nil) { 13237 break 13238 } 13239 v.reset(OpARM64MOVDstorezeroidx) 13240 v.AddArg(ptr) 13241 v.AddArg(idx) 13242 v.AddArg(mem) 13243 return true 13244 } 13245 // match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem) 13246 // cond: off == 0 && sym == nil 13247 // result: (MOVDstorezeroidx8 ptr idx mem) 13248 for { 13249 off := v.AuxInt 13250 sym := v.Aux 13251 _ = v.Args[1] 13252 v_0 := v.Args[0] 13253 if v_0.Op != OpARM64ADDshiftLL { 13254 break 13255 } 13256 if v_0.AuxInt != 3 { 13257 break 13258 } 13259 _ = v_0.Args[1] 13260 ptr := v_0.Args[0] 13261 idx := v_0.Args[1] 13262 mem := v.Args[1] 13263 if !(off == 0 && sym == nil) { 13264 break 13265 } 13266 v.reset(OpARM64MOVDstorezeroidx8) 13267 v.AddArg(ptr) 13268 v.AddArg(idx) 13269 v.AddArg(mem) 13270 return true 13271 } 13272 // match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem)) 13273 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,8) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 13274 // result: (MOVQstorezero [min(i,j)] {s} ptr0 mem) 13275 for { 13276 i := v.AuxInt 13277 s := v.Aux 13278 _ = v.Args[1] 13279 ptr0 := v.Args[0] 13280 x := v.Args[1] 13281 if x.Op != OpARM64MOVDstorezero { 13282 break 13283 } 13284 j := x.AuxInt 13285 if x.Aux != s { 
13286 break 13287 } 13288 _ = x.Args[1] 13289 ptr1 := x.Args[0] 13290 mem := x.Args[1] 13291 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 13292 break 13293 } 13294 v.reset(OpARM64MOVQstorezero) 13295 v.AuxInt = min(i, j) 13296 v.Aux = s 13297 v.AddArg(ptr0) 13298 v.AddArg(mem) 13299 return true 13300 } 13301 // match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem)) 13302 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 13303 // result: (MOVQstorezero [0] {s} p0 mem) 13304 for { 13305 if v.AuxInt != 8 { 13306 break 13307 } 13308 s := v.Aux 13309 _ = v.Args[1] 13310 p0 := v.Args[0] 13311 if p0.Op != OpARM64ADD { 13312 break 13313 } 13314 _ = p0.Args[1] 13315 ptr0 := p0.Args[0] 13316 idx0 := p0.Args[1] 13317 x := v.Args[1] 13318 if x.Op != OpARM64MOVDstorezeroidx { 13319 break 13320 } 13321 _ = x.Args[2] 13322 ptr1 := x.Args[0] 13323 idx1 := x.Args[1] 13324 mem := x.Args[2] 13325 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 13326 break 13327 } 13328 v.reset(OpARM64MOVQstorezero) 13329 v.AuxInt = 0 13330 v.Aux = s 13331 v.AddArg(p0) 13332 v.AddArg(mem) 13333 return true 13334 } 13335 // match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem)) 13336 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 13337 // result: (MOVQstorezero [0] {s} p0 mem) 13338 for { 13339 if v.AuxInt != 8 { 13340 break 13341 } 13342 s := v.Aux 13343 _ = v.Args[1] 13344 p0 := v.Args[0] 13345 if p0.Op != OpARM64ADDshiftLL { 13346 break 13347 } 13348 if p0.AuxInt != 3 { 13349 break 13350 } 13351 _ = p0.Args[1] 13352 ptr0 := p0.Args[0] 13353 idx0 := p0.Args[1] 13354 x := v.Args[1] 13355 if x.Op != 
OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = 0
		v.Aux = s
		v.AddArg(p0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezeroidx_0 applies the generated rewrite
// rules for OpARM64MOVDstorezeroidx values (from gen/ARM64.rules); it
// mutates v in place and reports whether a rule fired.
func rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v *Value) bool {
	// match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDstorezero [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
	// cond:
	// result: (MOVDstorezero [c] idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVDstorezeroidx8_0 applies the generated
// rewrite rules for OpARM64MOVDstorezeroidx8 values; it mutates v in place
// and reports whether a rule fired.
func rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v *Value) bool {
	// match: (MOVDstorezeroidx8 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDstorezero [c<<3] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHUload_0 applies the generated rewrite rules
// for OpARM64MOVHUload values; it mutates v in place and reports whether a
// rule fired.
func rewriteValueARM64_OpARM64MOVHUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHUloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr
:= v_0.Args[0] 13518 idx := v_0.Args[1] 13519 mem := v.Args[1] 13520 if !(off == 0 && sym == nil) { 13521 break 13522 } 13523 v.reset(OpARM64MOVHUloadidx) 13524 v.AddArg(ptr) 13525 v.AddArg(idx) 13526 v.AddArg(mem) 13527 return true 13528 } 13529 // match: (MOVHUload [off] {sym} (ADDshiftLL [1] ptr idx) mem) 13530 // cond: off == 0 && sym == nil 13531 // result: (MOVHUloadidx2 ptr idx mem) 13532 for { 13533 off := v.AuxInt 13534 sym := v.Aux 13535 _ = v.Args[1] 13536 v_0 := v.Args[0] 13537 if v_0.Op != OpARM64ADDshiftLL { 13538 break 13539 } 13540 if v_0.AuxInt != 1 { 13541 break 13542 } 13543 _ = v_0.Args[1] 13544 ptr := v_0.Args[0] 13545 idx := v_0.Args[1] 13546 mem := v.Args[1] 13547 if !(off == 0 && sym == nil) { 13548 break 13549 } 13550 v.reset(OpARM64MOVHUloadidx2) 13551 v.AddArg(ptr) 13552 v.AddArg(idx) 13553 v.AddArg(mem) 13554 return true 13555 } 13556 // match: (MOVHUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 13557 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 13558 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 13559 for { 13560 off1 := v.AuxInt 13561 sym1 := v.Aux 13562 _ = v.Args[1] 13563 v_0 := v.Args[0] 13564 if v_0.Op != OpARM64MOVDaddr { 13565 break 13566 } 13567 off2 := v_0.AuxInt 13568 sym2 := v_0.Aux 13569 ptr := v_0.Args[0] 13570 mem := v.Args[1] 13571 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 13572 break 13573 } 13574 v.reset(OpARM64MOVHUload) 13575 v.AuxInt = off1 + off2 13576 v.Aux = mergeSym(sym1, sym2) 13577 v.AddArg(ptr) 13578 v.AddArg(mem) 13579 return true 13580 } 13581 // match: (MOVHUload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 13582 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 13583 // result: (MOVDconst [0]) 13584 for { 13585 off := v.AuxInt 13586 sym := v.Aux 13587 _ = v.Args[1] 13588 ptr := v.Args[0] 13589 v_1 := v.Args[1] 13590 if v_1.Op != 
OpARM64MOVHstorezero { 13591 break 13592 } 13593 off2 := v_1.AuxInt 13594 sym2 := v_1.Aux 13595 _ = v_1.Args[1] 13596 ptr2 := v_1.Args[0] 13597 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 13598 break 13599 } 13600 v.reset(OpARM64MOVDconst) 13601 v.AuxInt = 0 13602 return true 13603 } 13604 // match: (MOVHUload [off] {sym} (SB) _) 13605 // cond: symIsRO(sym) 13606 // result: (MOVDconst [int64(read16(sym, off, config.BigEndian))]) 13607 for { 13608 off := v.AuxInt 13609 sym := v.Aux 13610 _ = v.Args[1] 13611 v_0 := v.Args[0] 13612 if v_0.Op != OpSB { 13613 break 13614 } 13615 if !(symIsRO(sym)) { 13616 break 13617 } 13618 v.reset(OpARM64MOVDconst) 13619 v.AuxInt = int64(read16(sym, off, config.BigEndian)) 13620 return true 13621 } 13622 return false 13623 } 13624 func rewriteValueARM64_OpARM64MOVHUloadidx_0(v *Value) bool { 13625 // match: (MOVHUloadidx ptr (MOVDconst [c]) mem) 13626 // cond: 13627 // result: (MOVHUload [c] ptr mem) 13628 for { 13629 _ = v.Args[2] 13630 ptr := v.Args[0] 13631 v_1 := v.Args[1] 13632 if v_1.Op != OpARM64MOVDconst { 13633 break 13634 } 13635 c := v_1.AuxInt 13636 mem := v.Args[2] 13637 v.reset(OpARM64MOVHUload) 13638 v.AuxInt = c 13639 v.AddArg(ptr) 13640 v.AddArg(mem) 13641 return true 13642 } 13643 // match: (MOVHUloadidx (MOVDconst [c]) ptr mem) 13644 // cond: 13645 // result: (MOVHUload [c] ptr mem) 13646 for { 13647 _ = v.Args[2] 13648 v_0 := v.Args[0] 13649 if v_0.Op != OpARM64MOVDconst { 13650 break 13651 } 13652 c := v_0.AuxInt 13653 ptr := v.Args[1] 13654 mem := v.Args[2] 13655 v.reset(OpARM64MOVHUload) 13656 v.AuxInt = c 13657 v.AddArg(ptr) 13658 v.AddArg(mem) 13659 return true 13660 } 13661 // match: (MOVHUloadidx ptr (SLLconst [1] idx) mem) 13662 // cond: 13663 // result: (MOVHUloadidx2 ptr idx mem) 13664 for { 13665 _ = v.Args[2] 13666 ptr := v.Args[0] 13667 v_1 := v.Args[1] 13668 if v_1.Op != OpARM64SLLconst { 13669 break 13670 } 13671 if v_1.AuxInt != 1 { 13672 break 13673 } 13674 idx := v_1.Args[0] 13675 
mem := v.Args[2] 13676 v.reset(OpARM64MOVHUloadidx2) 13677 v.AddArg(ptr) 13678 v.AddArg(idx) 13679 v.AddArg(mem) 13680 return true 13681 } 13682 // match: (MOVHUloadidx ptr (ADD idx idx) mem) 13683 // cond: 13684 // result: (MOVHUloadidx2 ptr idx mem) 13685 for { 13686 _ = v.Args[2] 13687 ptr := v.Args[0] 13688 v_1 := v.Args[1] 13689 if v_1.Op != OpARM64ADD { 13690 break 13691 } 13692 _ = v_1.Args[1] 13693 idx := v_1.Args[0] 13694 if idx != v_1.Args[1] { 13695 break 13696 } 13697 mem := v.Args[2] 13698 v.reset(OpARM64MOVHUloadidx2) 13699 v.AddArg(ptr) 13700 v.AddArg(idx) 13701 v.AddArg(mem) 13702 return true 13703 } 13704 // match: (MOVHUloadidx (ADD idx idx) ptr mem) 13705 // cond: 13706 // result: (MOVHUloadidx2 ptr idx mem) 13707 for { 13708 _ = v.Args[2] 13709 v_0 := v.Args[0] 13710 if v_0.Op != OpARM64ADD { 13711 break 13712 } 13713 _ = v_0.Args[1] 13714 idx := v_0.Args[0] 13715 if idx != v_0.Args[1] { 13716 break 13717 } 13718 ptr := v.Args[1] 13719 mem := v.Args[2] 13720 v.reset(OpARM64MOVHUloadidx2) 13721 v.AddArg(ptr) 13722 v.AddArg(idx) 13723 v.AddArg(mem) 13724 return true 13725 } 13726 // match: (MOVHUloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _)) 13727 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 13728 // result: (MOVDconst [0]) 13729 for { 13730 _ = v.Args[2] 13731 ptr := v.Args[0] 13732 idx := v.Args[1] 13733 v_2 := v.Args[2] 13734 if v_2.Op != OpARM64MOVHstorezeroidx { 13735 break 13736 } 13737 _ = v_2.Args[2] 13738 ptr2 := v_2.Args[0] 13739 idx2 := v_2.Args[1] 13740 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 13741 break 13742 } 13743 v.reset(OpARM64MOVDconst) 13744 v.AuxInt = 0 13745 return true 13746 } 13747 return false 13748 } 13749 func rewriteValueARM64_OpARM64MOVHUloadidx2_0(v *Value) bool { 13750 // match: (MOVHUloadidx2 ptr (MOVDconst [c]) mem) 13751 // cond: 13752 // result: (MOVHUload [c<<1] ptr mem) 13753 for { 13754 _ = 
v.Args[2] 13755 ptr := v.Args[0] 13756 v_1 := v.Args[1] 13757 if v_1.Op != OpARM64MOVDconst { 13758 break 13759 } 13760 c := v_1.AuxInt 13761 mem := v.Args[2] 13762 v.reset(OpARM64MOVHUload) 13763 v.AuxInt = c << 1 13764 v.AddArg(ptr) 13765 v.AddArg(mem) 13766 return true 13767 } 13768 // match: (MOVHUloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _)) 13769 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 13770 // result: (MOVDconst [0]) 13771 for { 13772 _ = v.Args[2] 13773 ptr := v.Args[0] 13774 idx := v.Args[1] 13775 v_2 := v.Args[2] 13776 if v_2.Op != OpARM64MOVHstorezeroidx2 { 13777 break 13778 } 13779 _ = v_2.Args[2] 13780 ptr2 := v_2.Args[0] 13781 idx2 := v_2.Args[1] 13782 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 13783 break 13784 } 13785 v.reset(OpARM64MOVDconst) 13786 v.AuxInt = 0 13787 return true 13788 } 13789 return false 13790 } 13791 func rewriteValueARM64_OpARM64MOVHUreg_0(v *Value) bool { 13792 // match: (MOVHUreg x:(MOVBUload _ _)) 13793 // cond: 13794 // result: (MOVDreg x) 13795 for { 13796 x := v.Args[0] 13797 if x.Op != OpARM64MOVBUload { 13798 break 13799 } 13800 _ = x.Args[1] 13801 v.reset(OpARM64MOVDreg) 13802 v.AddArg(x) 13803 return true 13804 } 13805 // match: (MOVHUreg x:(MOVHUload _ _)) 13806 // cond: 13807 // result: (MOVDreg x) 13808 for { 13809 x := v.Args[0] 13810 if x.Op != OpARM64MOVHUload { 13811 break 13812 } 13813 _ = x.Args[1] 13814 v.reset(OpARM64MOVDreg) 13815 v.AddArg(x) 13816 return true 13817 } 13818 // match: (MOVHUreg x:(MOVBUloadidx _ _ _)) 13819 // cond: 13820 // result: (MOVDreg x) 13821 for { 13822 x := v.Args[0] 13823 if x.Op != OpARM64MOVBUloadidx { 13824 break 13825 } 13826 _ = x.Args[2] 13827 v.reset(OpARM64MOVDreg) 13828 v.AddArg(x) 13829 return true 13830 } 13831 // match: (MOVHUreg x:(MOVHUloadidx _ _ _)) 13832 // cond: 13833 // result: (MOVDreg x) 13834 for { 13835 x := v.Args[0] 13836 if x.Op != OpARM64MOVHUloadidx { 13837 break 13838 } 13839 _ = x.Args[2] 13840 v.reset(OpARM64MOVDreg) 13841 
v.AddArg(x) 13842 return true 13843 } 13844 // match: (MOVHUreg x:(MOVHUloadidx2 _ _ _)) 13845 // cond: 13846 // result: (MOVDreg x) 13847 for { 13848 x := v.Args[0] 13849 if x.Op != OpARM64MOVHUloadidx2 { 13850 break 13851 } 13852 _ = x.Args[2] 13853 v.reset(OpARM64MOVDreg) 13854 v.AddArg(x) 13855 return true 13856 } 13857 // match: (MOVHUreg x:(MOVBUreg _)) 13858 // cond: 13859 // result: (MOVDreg x) 13860 for { 13861 x := v.Args[0] 13862 if x.Op != OpARM64MOVBUreg { 13863 break 13864 } 13865 v.reset(OpARM64MOVDreg) 13866 v.AddArg(x) 13867 return true 13868 } 13869 // match: (MOVHUreg x:(MOVHUreg _)) 13870 // cond: 13871 // result: (MOVDreg x) 13872 for { 13873 x := v.Args[0] 13874 if x.Op != OpARM64MOVHUreg { 13875 break 13876 } 13877 v.reset(OpARM64MOVDreg) 13878 v.AddArg(x) 13879 return true 13880 } 13881 // match: (MOVHUreg (ANDconst [c] x)) 13882 // cond: 13883 // result: (ANDconst [c&(1<<16-1)] x) 13884 for { 13885 v_0 := v.Args[0] 13886 if v_0.Op != OpARM64ANDconst { 13887 break 13888 } 13889 c := v_0.AuxInt 13890 x := v_0.Args[0] 13891 v.reset(OpARM64ANDconst) 13892 v.AuxInt = c & (1<<16 - 1) 13893 v.AddArg(x) 13894 return true 13895 } 13896 // match: (MOVHUreg (MOVDconst [c])) 13897 // cond: 13898 // result: (MOVDconst [int64(uint16(c))]) 13899 for { 13900 v_0 := v.Args[0] 13901 if v_0.Op != OpARM64MOVDconst { 13902 break 13903 } 13904 c := v_0.AuxInt 13905 v.reset(OpARM64MOVDconst) 13906 v.AuxInt = int64(uint16(c)) 13907 return true 13908 } 13909 // match: (MOVHUreg (SLLconst [sc] x)) 13910 // cond: isARM64BFMask(sc, 1<<16-1, sc) 13911 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x) 13912 for { 13913 v_0 := v.Args[0] 13914 if v_0.Op != OpARM64SLLconst { 13915 break 13916 } 13917 sc := v_0.AuxInt 13918 x := v_0.Args[0] 13919 if !(isARM64BFMask(sc, 1<<16-1, sc)) { 13920 break 13921 } 13922 v.reset(OpARM64UBFIZ) 13923 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc)) 13924 v.AddArg(x) 13925 return true 13926 } 13927 return false 
}

// rewriteValueARM64_OpARM64MOVHUreg_10 applies the remaining rewrite rule for
// OpARM64MOVHUreg (zero-extend halfword): a right shift whose surviving bits
// fit in 16 (per isARM64BFMask) is replaced by a single UBFX bitfield extract.
func rewriteValueARM64_OpARM64MOVHUreg_10(v *Value) bool {
	// match: (MOVHUreg (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<16-1, 0)
	// result: (UBFX [arm64BFAuxInt(sc, 16)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, 16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHload_0 applies rewrite rules to OpARM64MOVHload
// values. Each for-block below attempts one rule (documented by its
// match/cond/result comments, generated from gen/ARM64.rules) and returns
// true as soon as a rule fires; falling through all rules returns false.
func rewriteValueARM64_OpARM64MOVHload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVHloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVHloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVHload [off] {sym} (ADDshiftLL [1] ptr idx) mem)
// cond: off == 0 && sym == nil 14005 // result: (MOVHloadidx2 ptr idx mem) 14006 for { 14007 off := v.AuxInt 14008 sym := v.Aux 14009 _ = v.Args[1] 14010 v_0 := v.Args[0] 14011 if v_0.Op != OpARM64ADDshiftLL { 14012 break 14013 } 14014 if v_0.AuxInt != 1 { 14015 break 14016 } 14017 _ = v_0.Args[1] 14018 ptr := v_0.Args[0] 14019 idx := v_0.Args[1] 14020 mem := v.Args[1] 14021 if !(off == 0 && sym == nil) { 14022 break 14023 } 14024 v.reset(OpARM64MOVHloadidx2) 14025 v.AddArg(ptr) 14026 v.AddArg(idx) 14027 v.AddArg(mem) 14028 return true 14029 } 14030 // match: (MOVHload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 14031 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14032 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 14033 for { 14034 off1 := v.AuxInt 14035 sym1 := v.Aux 14036 _ = v.Args[1] 14037 v_0 := v.Args[0] 14038 if v_0.Op != OpARM64MOVDaddr { 14039 break 14040 } 14041 off2 := v_0.AuxInt 14042 sym2 := v_0.Aux 14043 ptr := v_0.Args[0] 14044 mem := v.Args[1] 14045 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14046 break 14047 } 14048 v.reset(OpARM64MOVHload) 14049 v.AuxInt = off1 + off2 14050 v.Aux = mergeSym(sym1, sym2) 14051 v.AddArg(ptr) 14052 v.AddArg(mem) 14053 return true 14054 } 14055 // match: (MOVHload [off] {sym} ptr (MOVHstorezero [off2] {sym2} ptr2 _)) 14056 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 14057 // result: (MOVDconst [0]) 14058 for { 14059 off := v.AuxInt 14060 sym := v.Aux 14061 _ = v.Args[1] 14062 ptr := v.Args[0] 14063 v_1 := v.Args[1] 14064 if v_1.Op != OpARM64MOVHstorezero { 14065 break 14066 } 14067 off2 := v_1.AuxInt 14068 sym2 := v_1.Aux 14069 _ = v_1.Args[1] 14070 ptr2 := v_1.Args[0] 14071 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 14072 break 14073 } 14074 v.reset(OpARM64MOVDconst) 14075 v.AuxInt = 0 14076 return true 14077 } 14078 return false 14079 } 14080 func 
rewriteValueARM64_OpARM64MOVHloadidx_0(v *Value) bool { 14081 // match: (MOVHloadidx ptr (MOVDconst [c]) mem) 14082 // cond: 14083 // result: (MOVHload [c] ptr mem) 14084 for { 14085 _ = v.Args[2] 14086 ptr := v.Args[0] 14087 v_1 := v.Args[1] 14088 if v_1.Op != OpARM64MOVDconst { 14089 break 14090 } 14091 c := v_1.AuxInt 14092 mem := v.Args[2] 14093 v.reset(OpARM64MOVHload) 14094 v.AuxInt = c 14095 v.AddArg(ptr) 14096 v.AddArg(mem) 14097 return true 14098 } 14099 // match: (MOVHloadidx (MOVDconst [c]) ptr mem) 14100 // cond: 14101 // result: (MOVHload [c] ptr mem) 14102 for { 14103 _ = v.Args[2] 14104 v_0 := v.Args[0] 14105 if v_0.Op != OpARM64MOVDconst { 14106 break 14107 } 14108 c := v_0.AuxInt 14109 ptr := v.Args[1] 14110 mem := v.Args[2] 14111 v.reset(OpARM64MOVHload) 14112 v.AuxInt = c 14113 v.AddArg(ptr) 14114 v.AddArg(mem) 14115 return true 14116 } 14117 // match: (MOVHloadidx ptr (SLLconst [1] idx) mem) 14118 // cond: 14119 // result: (MOVHloadidx2 ptr idx mem) 14120 for { 14121 _ = v.Args[2] 14122 ptr := v.Args[0] 14123 v_1 := v.Args[1] 14124 if v_1.Op != OpARM64SLLconst { 14125 break 14126 } 14127 if v_1.AuxInt != 1 { 14128 break 14129 } 14130 idx := v_1.Args[0] 14131 mem := v.Args[2] 14132 v.reset(OpARM64MOVHloadidx2) 14133 v.AddArg(ptr) 14134 v.AddArg(idx) 14135 v.AddArg(mem) 14136 return true 14137 } 14138 // match: (MOVHloadidx ptr (ADD idx idx) mem) 14139 // cond: 14140 // result: (MOVHloadidx2 ptr idx mem) 14141 for { 14142 _ = v.Args[2] 14143 ptr := v.Args[0] 14144 v_1 := v.Args[1] 14145 if v_1.Op != OpARM64ADD { 14146 break 14147 } 14148 _ = v_1.Args[1] 14149 idx := v_1.Args[0] 14150 if idx != v_1.Args[1] { 14151 break 14152 } 14153 mem := v.Args[2] 14154 v.reset(OpARM64MOVHloadidx2) 14155 v.AddArg(ptr) 14156 v.AddArg(idx) 14157 v.AddArg(mem) 14158 return true 14159 } 14160 // match: (MOVHloadidx (ADD idx idx) ptr mem) 14161 // cond: 14162 // result: (MOVHloadidx2 ptr idx mem) 14163 for { 14164 _ = v.Args[2] 14165 v_0 := v.Args[0] 14166 if v_0.Op 
!= OpARM64ADD { 14167 break 14168 } 14169 _ = v_0.Args[1] 14170 idx := v_0.Args[0] 14171 if idx != v_0.Args[1] { 14172 break 14173 } 14174 ptr := v.Args[1] 14175 mem := v.Args[2] 14176 v.reset(OpARM64MOVHloadidx2) 14177 v.AddArg(ptr) 14178 v.AddArg(idx) 14179 v.AddArg(mem) 14180 return true 14181 } 14182 // match: (MOVHloadidx ptr idx (MOVHstorezeroidx ptr2 idx2 _)) 14183 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 14184 // result: (MOVDconst [0]) 14185 for { 14186 _ = v.Args[2] 14187 ptr := v.Args[0] 14188 idx := v.Args[1] 14189 v_2 := v.Args[2] 14190 if v_2.Op != OpARM64MOVHstorezeroidx { 14191 break 14192 } 14193 _ = v_2.Args[2] 14194 ptr2 := v_2.Args[0] 14195 idx2 := v_2.Args[1] 14196 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 14197 break 14198 } 14199 v.reset(OpARM64MOVDconst) 14200 v.AuxInt = 0 14201 return true 14202 } 14203 return false 14204 } 14205 func rewriteValueARM64_OpARM64MOVHloadidx2_0(v *Value) bool { 14206 // match: (MOVHloadidx2 ptr (MOVDconst [c]) mem) 14207 // cond: 14208 // result: (MOVHload [c<<1] ptr mem) 14209 for { 14210 _ = v.Args[2] 14211 ptr := v.Args[0] 14212 v_1 := v.Args[1] 14213 if v_1.Op != OpARM64MOVDconst { 14214 break 14215 } 14216 c := v_1.AuxInt 14217 mem := v.Args[2] 14218 v.reset(OpARM64MOVHload) 14219 v.AuxInt = c << 1 14220 v.AddArg(ptr) 14221 v.AddArg(mem) 14222 return true 14223 } 14224 // match: (MOVHloadidx2 ptr idx (MOVHstorezeroidx2 ptr2 idx2 _)) 14225 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 14226 // result: (MOVDconst [0]) 14227 for { 14228 _ = v.Args[2] 14229 ptr := v.Args[0] 14230 idx := v.Args[1] 14231 v_2 := v.Args[2] 14232 if v_2.Op != OpARM64MOVHstorezeroidx2 { 14233 break 14234 } 14235 _ = v_2.Args[2] 14236 ptr2 := v_2.Args[0] 14237 idx2 := v_2.Args[1] 14238 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 14239 break 14240 } 14241 v.reset(OpARM64MOVDconst) 14242 v.AuxInt = 
0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHreg_0 applies the first batch of rewrite rules
// for OpARM64MOVHreg (sign-extend halfword): when the operand is one of the
// narrower load or register-extension ops listed in the match comments below,
// the extension is replaced by a plain (MOVDreg x) copy.
func rewriteValueARM64_OpARM64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHreg_10 applies the remaining MOVHreg rules:
// folding a constant operand to int64(int16(c)), and merging a left shift by
// lc < 16 into a single SBFIZ bitfield instruction.
func rewriteValueARM64_OpARM64MOVHreg_10(v *Value) bool {
	// match: (MOVHreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int16(c))
		return true
	}
	// match: (MOVHreg (SLLconst [lc] x))
	// cond: lc < 16
	// result: (SBFIZ [arm64BFAuxInt(lc, 16-lc)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < 16) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(lc, 16-lc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVHstore_0 applies the first batch of rewrite
// rules to OpARM64MOVHstore values; each for-block tries one rule from
// gen/ARM64.rules and returns true on the first match.
func rewriteValueARM64_OpARM64MOVHstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVHstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVHstore [off1+off2] {sym} ptr val mem)
	for {
off1 := v.AuxInt 14421 sym := v.Aux 14422 _ = v.Args[2] 14423 v_0 := v.Args[0] 14424 if v_0.Op != OpARM64ADDconst { 14425 break 14426 } 14427 off2 := v_0.AuxInt 14428 ptr := v_0.Args[0] 14429 val := v.Args[1] 14430 mem := v.Args[2] 14431 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14432 break 14433 } 14434 v.reset(OpARM64MOVHstore) 14435 v.AuxInt = off1 + off2 14436 v.Aux = sym 14437 v.AddArg(ptr) 14438 v.AddArg(val) 14439 v.AddArg(mem) 14440 return true 14441 } 14442 // match: (MOVHstore [off] {sym} (ADD ptr idx) val mem) 14443 // cond: off == 0 && sym == nil 14444 // result: (MOVHstoreidx ptr idx val mem) 14445 for { 14446 off := v.AuxInt 14447 sym := v.Aux 14448 _ = v.Args[2] 14449 v_0 := v.Args[0] 14450 if v_0.Op != OpARM64ADD { 14451 break 14452 } 14453 _ = v_0.Args[1] 14454 ptr := v_0.Args[0] 14455 idx := v_0.Args[1] 14456 val := v.Args[1] 14457 mem := v.Args[2] 14458 if !(off == 0 && sym == nil) { 14459 break 14460 } 14461 v.reset(OpARM64MOVHstoreidx) 14462 v.AddArg(ptr) 14463 v.AddArg(idx) 14464 v.AddArg(val) 14465 v.AddArg(mem) 14466 return true 14467 } 14468 // match: (MOVHstore [off] {sym} (ADDshiftLL [1] ptr idx) val mem) 14469 // cond: off == 0 && sym == nil 14470 // result: (MOVHstoreidx2 ptr idx val mem) 14471 for { 14472 off := v.AuxInt 14473 sym := v.Aux 14474 _ = v.Args[2] 14475 v_0 := v.Args[0] 14476 if v_0.Op != OpARM64ADDshiftLL { 14477 break 14478 } 14479 if v_0.AuxInt != 1 { 14480 break 14481 } 14482 _ = v_0.Args[1] 14483 ptr := v_0.Args[0] 14484 idx := v_0.Args[1] 14485 val := v.Args[1] 14486 mem := v.Args[2] 14487 if !(off == 0 && sym == nil) { 14488 break 14489 } 14490 v.reset(OpARM64MOVHstoreidx2) 14491 v.AddArg(ptr) 14492 v.AddArg(idx) 14493 v.AddArg(val) 14494 v.AddArg(mem) 14495 return true 14496 } 14497 // match: (MOVHstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 14498 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 14499 // result: 
(MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 14500 for { 14501 off1 := v.AuxInt 14502 sym1 := v.Aux 14503 _ = v.Args[2] 14504 v_0 := v.Args[0] 14505 if v_0.Op != OpARM64MOVDaddr { 14506 break 14507 } 14508 off2 := v_0.AuxInt 14509 sym2 := v_0.Aux 14510 ptr := v_0.Args[0] 14511 val := v.Args[1] 14512 mem := v.Args[2] 14513 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 14514 break 14515 } 14516 v.reset(OpARM64MOVHstore) 14517 v.AuxInt = off1 + off2 14518 v.Aux = mergeSym(sym1, sym2) 14519 v.AddArg(ptr) 14520 v.AddArg(val) 14521 v.AddArg(mem) 14522 return true 14523 } 14524 // match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem) 14525 // cond: 14526 // result: (MOVHstorezero [off] {sym} ptr mem) 14527 for { 14528 off := v.AuxInt 14529 sym := v.Aux 14530 _ = v.Args[2] 14531 ptr := v.Args[0] 14532 v_1 := v.Args[1] 14533 if v_1.Op != OpARM64MOVDconst { 14534 break 14535 } 14536 if v_1.AuxInt != 0 { 14537 break 14538 } 14539 mem := v.Args[2] 14540 v.reset(OpARM64MOVHstorezero) 14541 v.AuxInt = off 14542 v.Aux = sym 14543 v.AddArg(ptr) 14544 v.AddArg(mem) 14545 return true 14546 } 14547 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 14548 // cond: 14549 // result: (MOVHstore [off] {sym} ptr x mem) 14550 for { 14551 off := v.AuxInt 14552 sym := v.Aux 14553 _ = v.Args[2] 14554 ptr := v.Args[0] 14555 v_1 := v.Args[1] 14556 if v_1.Op != OpARM64MOVHreg { 14557 break 14558 } 14559 x := v_1.Args[0] 14560 mem := v.Args[2] 14561 v.reset(OpARM64MOVHstore) 14562 v.AuxInt = off 14563 v.Aux = sym 14564 v.AddArg(ptr) 14565 v.AddArg(x) 14566 v.AddArg(mem) 14567 return true 14568 } 14569 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 14570 // cond: 14571 // result: (MOVHstore [off] {sym} ptr x mem) 14572 for { 14573 off := v.AuxInt 14574 sym := v.Aux 14575 _ = v.Args[2] 14576 ptr := v.Args[0] 14577 v_1 := v.Args[1] 14578 if v_1.Op != OpARM64MOVHUreg { 14579 break 14580 } 14581 x := v_1.Args[0] 14582 mem 
:= v.Args[2] 14583 v.reset(OpARM64MOVHstore) 14584 v.AuxInt = off 14585 v.Aux = sym 14586 v.AddArg(ptr) 14587 v.AddArg(x) 14588 v.AddArg(mem) 14589 return true 14590 } 14591 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 14592 // cond: 14593 // result: (MOVHstore [off] {sym} ptr x mem) 14594 for { 14595 off := v.AuxInt 14596 sym := v.Aux 14597 _ = v.Args[2] 14598 ptr := v.Args[0] 14599 v_1 := v.Args[1] 14600 if v_1.Op != OpARM64MOVWreg { 14601 break 14602 } 14603 x := v_1.Args[0] 14604 mem := v.Args[2] 14605 v.reset(OpARM64MOVHstore) 14606 v.AuxInt = off 14607 v.Aux = sym 14608 v.AddArg(ptr) 14609 v.AddArg(x) 14610 v.AddArg(mem) 14611 return true 14612 } 14613 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 14614 // cond: 14615 // result: (MOVHstore [off] {sym} ptr x mem) 14616 for { 14617 off := v.AuxInt 14618 sym := v.Aux 14619 _ = v.Args[2] 14620 ptr := v.Args[0] 14621 v_1 := v.Args[1] 14622 if v_1.Op != OpARM64MOVWUreg { 14623 break 14624 } 14625 x := v_1.Args[0] 14626 mem := v.Args[2] 14627 v.reset(OpARM64MOVHstore) 14628 v.AuxInt = off 14629 v.Aux = sym 14630 v.AddArg(ptr) 14631 v.AddArg(x) 14632 v.AddArg(mem) 14633 return true 14634 } 14635 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14636 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14637 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14638 for { 14639 i := v.AuxInt 14640 s := v.Aux 14641 _ = v.Args[2] 14642 ptr0 := v.Args[0] 14643 v_1 := v.Args[1] 14644 if v_1.Op != OpARM64SRLconst { 14645 break 14646 } 14647 if v_1.AuxInt != 16 { 14648 break 14649 } 14650 w := v_1.Args[0] 14651 x := v.Args[2] 14652 if x.Op != OpARM64MOVHstore { 14653 break 14654 } 14655 if x.AuxInt != i-2 { 14656 break 14657 } 14658 if x.Aux != s { 14659 break 14660 } 14661 _ = x.Args[2] 14662 ptr1 := x.Args[0] 14663 if w != x.Args[1] { 14664 break 14665 } 14666 mem := x.Args[2] 14667 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14668 break 14669 } 14670 
v.reset(OpARM64MOVWstore) 14671 v.AuxInt = i - 2 14672 v.Aux = s 14673 v.AddArg(ptr0) 14674 v.AddArg(w) 14675 v.AddArg(mem) 14676 return true 14677 } 14678 return false 14679 } 14680 func rewriteValueARM64_OpARM64MOVHstore_10(v *Value) bool { 14681 b := v.Block 14682 _ = b 14683 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 14684 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14685 // result: (MOVWstoreidx ptr1 idx1 w mem) 14686 for { 14687 if v.AuxInt != 2 { 14688 break 14689 } 14690 s := v.Aux 14691 _ = v.Args[2] 14692 v_0 := v.Args[0] 14693 if v_0.Op != OpARM64ADD { 14694 break 14695 } 14696 _ = v_0.Args[1] 14697 ptr0 := v_0.Args[0] 14698 idx0 := v_0.Args[1] 14699 v_1 := v.Args[1] 14700 if v_1.Op != OpARM64SRLconst { 14701 break 14702 } 14703 if v_1.AuxInt != 16 { 14704 break 14705 } 14706 w := v_1.Args[0] 14707 x := v.Args[2] 14708 if x.Op != OpARM64MOVHstoreidx { 14709 break 14710 } 14711 _ = x.Args[3] 14712 ptr1 := x.Args[0] 14713 idx1 := x.Args[1] 14714 if w != x.Args[2] { 14715 break 14716 } 14717 mem := x.Args[3] 14718 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 14719 break 14720 } 14721 v.reset(OpARM64MOVWstoreidx) 14722 v.AddArg(ptr1) 14723 v.AddArg(idx1) 14724 v.AddArg(w) 14725 v.AddArg(mem) 14726 return true 14727 } 14728 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 14729 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 14730 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 14731 for { 14732 if v.AuxInt != 2 { 14733 break 14734 } 14735 s := v.Aux 14736 _ = v.Args[2] 14737 v_0 := v.Args[0] 14738 if v_0.Op != OpARM64ADDshiftLL { 14739 break 14740 } 14741 if v_0.AuxInt != 1 
{ 14742 break 14743 } 14744 _ = v_0.Args[1] 14745 ptr0 := v_0.Args[0] 14746 idx0 := v_0.Args[1] 14747 v_1 := v.Args[1] 14748 if v_1.Op != OpARM64SRLconst { 14749 break 14750 } 14751 if v_1.AuxInt != 16 { 14752 break 14753 } 14754 w := v_1.Args[0] 14755 x := v.Args[2] 14756 if x.Op != OpARM64MOVHstoreidx2 { 14757 break 14758 } 14759 _ = x.Args[3] 14760 ptr1 := x.Args[0] 14761 idx1 := x.Args[1] 14762 if w != x.Args[2] { 14763 break 14764 } 14765 mem := x.Args[3] 14766 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 14767 break 14768 } 14769 v.reset(OpARM64MOVWstoreidx) 14770 v.AddArg(ptr1) 14771 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 14772 v0.AuxInt = 1 14773 v0.AddArg(idx1) 14774 v.AddArg(v0) 14775 v.AddArg(w) 14776 v.AddArg(mem) 14777 return true 14778 } 14779 // match: (MOVHstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14780 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14781 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14782 for { 14783 i := v.AuxInt 14784 s := v.Aux 14785 _ = v.Args[2] 14786 ptr0 := v.Args[0] 14787 v_1 := v.Args[1] 14788 if v_1.Op != OpARM64UBFX { 14789 break 14790 } 14791 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14792 break 14793 } 14794 w := v_1.Args[0] 14795 x := v.Args[2] 14796 if x.Op != OpARM64MOVHstore { 14797 break 14798 } 14799 if x.AuxInt != i-2 { 14800 break 14801 } 14802 if x.Aux != s { 14803 break 14804 } 14805 _ = x.Args[2] 14806 ptr1 := x.Args[0] 14807 if w != x.Args[1] { 14808 break 14809 } 14810 mem := x.Args[2] 14811 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14812 break 14813 } 14814 v.reset(OpARM64MOVWstore) 14815 v.AuxInt = i - 2 14816 v.Aux = s 14817 v.AddArg(ptr0) 14818 v.AddArg(w) 14819 v.AddArg(mem) 14820 return true 14821 } 14822 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstoreidx ptr1 idx1 w mem)) 14823 // cond: x.Uses == 1 && s == nil && 
(isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14824 // result: (MOVWstoreidx ptr1 idx1 w mem) 14825 for { 14826 if v.AuxInt != 2 { 14827 break 14828 } 14829 s := v.Aux 14830 _ = v.Args[2] 14831 v_0 := v.Args[0] 14832 if v_0.Op != OpARM64ADD { 14833 break 14834 } 14835 _ = v_0.Args[1] 14836 ptr0 := v_0.Args[0] 14837 idx0 := v_0.Args[1] 14838 v_1 := v.Args[1] 14839 if v_1.Op != OpARM64UBFX { 14840 break 14841 } 14842 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14843 break 14844 } 14845 w := v_1.Args[0] 14846 x := v.Args[2] 14847 if x.Op != OpARM64MOVHstoreidx { 14848 break 14849 } 14850 _ = x.Args[3] 14851 ptr1 := x.Args[0] 14852 idx1 := x.Args[1] 14853 if w != x.Args[2] { 14854 break 14855 } 14856 mem := x.Args[3] 14857 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 14858 break 14859 } 14860 v.reset(OpARM64MOVWstoreidx) 14861 v.AddArg(ptr1) 14862 v.AddArg(idx1) 14863 v.AddArg(w) 14864 v.AddArg(mem) 14865 return true 14866 } 14867 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (UBFX [arm64BFAuxInt(16, 16)] w) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 14868 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 14869 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 14870 for { 14871 if v.AuxInt != 2 { 14872 break 14873 } 14874 s := v.Aux 14875 _ = v.Args[2] 14876 v_0 := v.Args[0] 14877 if v_0.Op != OpARM64ADDshiftLL { 14878 break 14879 } 14880 if v_0.AuxInt != 1 { 14881 break 14882 } 14883 _ = v_0.Args[1] 14884 ptr0 := v_0.Args[0] 14885 idx0 := v_0.Args[1] 14886 v_1 := v.Args[1] 14887 if v_1.Op != OpARM64UBFX { 14888 break 14889 } 14890 if v_1.AuxInt != arm64BFAuxInt(16, 16) { 14891 break 14892 } 14893 w := v_1.Args[0] 14894 x := v.Args[2] 14895 if x.Op != OpARM64MOVHstoreidx2 { 14896 break 14897 } 14898 _ = x.Args[3] 14899 ptr1 := x.Args[0] 
14900 idx1 := x.Args[1] 14901 if w != x.Args[2] { 14902 break 14903 } 14904 mem := x.Args[3] 14905 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 14906 break 14907 } 14908 v.reset(OpARM64MOVWstoreidx) 14909 v.AddArg(ptr1) 14910 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 14911 v0.AuxInt = 1 14912 v0.AddArg(idx1) 14913 v.AddArg(v0) 14914 v.AddArg(w) 14915 v.AddArg(mem) 14916 return true 14917 } 14918 // match: (MOVHstore [i] {s} ptr0 (SRLconst [16] (MOVDreg w)) x:(MOVHstore [i-2] {s} ptr1 w mem)) 14919 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 14920 // result: (MOVWstore [i-2] {s} ptr0 w mem) 14921 for { 14922 i := v.AuxInt 14923 s := v.Aux 14924 _ = v.Args[2] 14925 ptr0 := v.Args[0] 14926 v_1 := v.Args[1] 14927 if v_1.Op != OpARM64SRLconst { 14928 break 14929 } 14930 if v_1.AuxInt != 16 { 14931 break 14932 } 14933 v_1_0 := v_1.Args[0] 14934 if v_1_0.Op != OpARM64MOVDreg { 14935 break 14936 } 14937 w := v_1_0.Args[0] 14938 x := v.Args[2] 14939 if x.Op != OpARM64MOVHstore { 14940 break 14941 } 14942 if x.AuxInt != i-2 { 14943 break 14944 } 14945 if x.Aux != s { 14946 break 14947 } 14948 _ = x.Args[2] 14949 ptr1 := x.Args[0] 14950 if w != x.Args[1] { 14951 break 14952 } 14953 mem := x.Args[2] 14954 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 14955 break 14956 } 14957 v.reset(OpARM64MOVWstore) 14958 v.AuxInt = i - 2 14959 v.Aux = s 14960 v.AddArg(ptr0) 14961 v.AddArg(w) 14962 v.AddArg(mem) 14963 return true 14964 } 14965 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx ptr1 idx1 w mem)) 14966 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 14967 // result: (MOVWstoreidx ptr1 idx1 w mem) 14968 for { 14969 if v.AuxInt != 2 { 14970 break 14971 } 14972 s := v.Aux 14973 _ = v.Args[2] 14974 v_0 := v.Args[0] 14975 if v_0.Op != OpARM64ADD { 14976 
break 14977 } 14978 _ = v_0.Args[1] 14979 ptr0 := v_0.Args[0] 14980 idx0 := v_0.Args[1] 14981 v_1 := v.Args[1] 14982 if v_1.Op != OpARM64SRLconst { 14983 break 14984 } 14985 if v_1.AuxInt != 16 { 14986 break 14987 } 14988 v_1_0 := v_1.Args[0] 14989 if v_1_0.Op != OpARM64MOVDreg { 14990 break 14991 } 14992 w := v_1_0.Args[0] 14993 x := v.Args[2] 14994 if x.Op != OpARM64MOVHstoreidx { 14995 break 14996 } 14997 _ = x.Args[3] 14998 ptr1 := x.Args[0] 14999 idx1 := x.Args[1] 15000 if w != x.Args[2] { 15001 break 15002 } 15003 mem := x.Args[3] 15004 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15005 break 15006 } 15007 v.reset(OpARM64MOVWstoreidx) 15008 v.AddArg(ptr1) 15009 v.AddArg(idx1) 15010 v.AddArg(w) 15011 v.AddArg(mem) 15012 return true 15013 } 15014 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [16] (MOVDreg w)) x:(MOVHstoreidx2 ptr1 idx1 w mem)) 15015 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15016 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w mem) 15017 for { 15018 if v.AuxInt != 2 { 15019 break 15020 } 15021 s := v.Aux 15022 _ = v.Args[2] 15023 v_0 := v.Args[0] 15024 if v_0.Op != OpARM64ADDshiftLL { 15025 break 15026 } 15027 if v_0.AuxInt != 1 { 15028 break 15029 } 15030 _ = v_0.Args[1] 15031 ptr0 := v_0.Args[0] 15032 idx0 := v_0.Args[1] 15033 v_1 := v.Args[1] 15034 if v_1.Op != OpARM64SRLconst { 15035 break 15036 } 15037 if v_1.AuxInt != 16 { 15038 break 15039 } 15040 v_1_0 := v_1.Args[0] 15041 if v_1_0.Op != OpARM64MOVDreg { 15042 break 15043 } 15044 w := v_1_0.Args[0] 15045 x := v.Args[2] 15046 if x.Op != OpARM64MOVHstoreidx2 { 15047 break 15048 } 15049 _ = x.Args[3] 15050 ptr1 := x.Args[0] 15051 idx1 := x.Args[1] 15052 if w != x.Args[2] { 15053 break 15054 } 15055 mem := x.Args[3] 15056 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) 
&& clobber(x)) { 15057 break 15058 } 15059 v.reset(OpARM64MOVWstoreidx) 15060 v.AddArg(ptr1) 15061 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15062 v0.AuxInt = 1 15063 v0.AddArg(idx1) 15064 v.AddArg(v0) 15065 v.AddArg(w) 15066 v.AddArg(mem) 15067 return true 15068 } 15069 // match: (MOVHstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVHstore [i-2] {s} ptr1 w0:(SRLconst [j-16] w) mem)) 15070 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 15071 // result: (MOVWstore [i-2] {s} ptr0 w0 mem) 15072 for { 15073 i := v.AuxInt 15074 s := v.Aux 15075 _ = v.Args[2] 15076 ptr0 := v.Args[0] 15077 v_1 := v.Args[1] 15078 if v_1.Op != OpARM64SRLconst { 15079 break 15080 } 15081 j := v_1.AuxInt 15082 w := v_1.Args[0] 15083 x := v.Args[2] 15084 if x.Op != OpARM64MOVHstore { 15085 break 15086 } 15087 if x.AuxInt != i-2 { 15088 break 15089 } 15090 if x.Aux != s { 15091 break 15092 } 15093 _ = x.Args[2] 15094 ptr1 := x.Args[0] 15095 w0 := x.Args[1] 15096 if w0.Op != OpARM64SRLconst { 15097 break 15098 } 15099 if w0.AuxInt != j-16 { 15100 break 15101 } 15102 if w != w0.Args[0] { 15103 break 15104 } 15105 mem := x.Args[2] 15106 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 15107 break 15108 } 15109 v.reset(OpARM64MOVWstore) 15110 v.AuxInt = i - 2 15111 v.Aux = s 15112 v.AddArg(ptr0) 15113 v.AddArg(w0) 15114 v.AddArg(mem) 15115 return true 15116 } 15117 // match: (MOVHstore [2] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 15118 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15119 // result: (MOVWstoreidx ptr1 idx1 w0 mem) 15120 for { 15121 if v.AuxInt != 2 { 15122 break 15123 } 15124 s := v.Aux 15125 _ = v.Args[2] 15126 v_0 := v.Args[0] 15127 if v_0.Op != OpARM64ADD { 15128 break 15129 } 15130 _ = v_0.Args[1] 15131 ptr0 := v_0.Args[0] 15132 idx0 := v_0.Args[1] 15133 v_1 := v.Args[1] 15134 if v_1.Op != 
OpARM64SRLconst { 15135 break 15136 } 15137 j := v_1.AuxInt 15138 w := v_1.Args[0] 15139 x := v.Args[2] 15140 if x.Op != OpARM64MOVHstoreidx { 15141 break 15142 } 15143 _ = x.Args[3] 15144 ptr1 := x.Args[0] 15145 idx1 := x.Args[1] 15146 w0 := x.Args[2] 15147 if w0.Op != OpARM64SRLconst { 15148 break 15149 } 15150 if w0.AuxInt != j-16 { 15151 break 15152 } 15153 if w != w0.Args[0] { 15154 break 15155 } 15156 mem := x.Args[3] 15157 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15158 break 15159 } 15160 v.reset(OpARM64MOVWstoreidx) 15161 v.AddArg(ptr1) 15162 v.AddArg(idx1) 15163 v.AddArg(w0) 15164 v.AddArg(mem) 15165 return true 15166 } 15167 return false 15168 } 15169 func rewriteValueARM64_OpARM64MOVHstore_20(v *Value) bool { 15170 b := v.Block 15171 _ = b 15172 // match: (MOVHstore [2] {s} (ADDshiftLL [1] ptr0 idx0) (SRLconst [j] w) x:(MOVHstoreidx2 ptr1 idx1 w0:(SRLconst [j-16] w) mem)) 15173 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15174 // result: (MOVWstoreidx ptr1 (SLLconst <idx1.Type> [1] idx1) w0 mem) 15175 for { 15176 if v.AuxInt != 2 { 15177 break 15178 } 15179 s := v.Aux 15180 _ = v.Args[2] 15181 v_0 := v.Args[0] 15182 if v_0.Op != OpARM64ADDshiftLL { 15183 break 15184 } 15185 if v_0.AuxInt != 1 { 15186 break 15187 } 15188 _ = v_0.Args[1] 15189 ptr0 := v_0.Args[0] 15190 idx0 := v_0.Args[1] 15191 v_1 := v.Args[1] 15192 if v_1.Op != OpARM64SRLconst { 15193 break 15194 } 15195 j := v_1.AuxInt 15196 w := v_1.Args[0] 15197 x := v.Args[2] 15198 if x.Op != OpARM64MOVHstoreidx2 { 15199 break 15200 } 15201 _ = x.Args[3] 15202 ptr1 := x.Args[0] 15203 idx1 := x.Args[1] 15204 w0 := x.Args[2] 15205 if w0.Op != OpARM64SRLconst { 15206 break 15207 } 15208 if w0.AuxInt != j-16 { 15209 break 15210 } 15211 if w != w0.Args[0] { 15212 break 15213 } 15214 mem := x.Args[3] 15215 if !(x.Uses == 1 && s == nil && 
isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15216 break 15217 } 15218 v.reset(OpARM64MOVWstoreidx) 15219 v.AddArg(ptr1) 15220 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15221 v0.AuxInt = 1 15222 v0.AddArg(idx1) 15223 v.AddArg(v0) 15224 v.AddArg(w0) 15225 v.AddArg(mem) 15226 return true 15227 } 15228 return false 15229 } 15230 func rewriteValueARM64_OpARM64MOVHstoreidx_0(v *Value) bool { 15231 // match: (MOVHstoreidx ptr (MOVDconst [c]) val mem) 15232 // cond: 15233 // result: (MOVHstore [c] ptr val mem) 15234 for { 15235 _ = v.Args[3] 15236 ptr := v.Args[0] 15237 v_1 := v.Args[1] 15238 if v_1.Op != OpARM64MOVDconst { 15239 break 15240 } 15241 c := v_1.AuxInt 15242 val := v.Args[2] 15243 mem := v.Args[3] 15244 v.reset(OpARM64MOVHstore) 15245 v.AuxInt = c 15246 v.AddArg(ptr) 15247 v.AddArg(val) 15248 v.AddArg(mem) 15249 return true 15250 } 15251 // match: (MOVHstoreidx (MOVDconst [c]) idx val mem) 15252 // cond: 15253 // result: (MOVHstore [c] idx val mem) 15254 for { 15255 _ = v.Args[3] 15256 v_0 := v.Args[0] 15257 if v_0.Op != OpARM64MOVDconst { 15258 break 15259 } 15260 c := v_0.AuxInt 15261 idx := v.Args[1] 15262 val := v.Args[2] 15263 mem := v.Args[3] 15264 v.reset(OpARM64MOVHstore) 15265 v.AuxInt = c 15266 v.AddArg(idx) 15267 v.AddArg(val) 15268 v.AddArg(mem) 15269 return true 15270 } 15271 // match: (MOVHstoreidx ptr (SLLconst [1] idx) val mem) 15272 // cond: 15273 // result: (MOVHstoreidx2 ptr idx val mem) 15274 for { 15275 _ = v.Args[3] 15276 ptr := v.Args[0] 15277 v_1 := v.Args[1] 15278 if v_1.Op != OpARM64SLLconst { 15279 break 15280 } 15281 if v_1.AuxInt != 1 { 15282 break 15283 } 15284 idx := v_1.Args[0] 15285 val := v.Args[2] 15286 mem := v.Args[3] 15287 v.reset(OpARM64MOVHstoreidx2) 15288 v.AddArg(ptr) 15289 v.AddArg(idx) 15290 v.AddArg(val) 15291 v.AddArg(mem) 15292 return true 15293 } 15294 // match: (MOVHstoreidx ptr (ADD idx idx) val mem) 15295 // cond: 15296 // result: (MOVHstoreidx2 ptr idx val mem) 15297 for { 15298 
_ = v.Args[3] 15299 ptr := v.Args[0] 15300 v_1 := v.Args[1] 15301 if v_1.Op != OpARM64ADD { 15302 break 15303 } 15304 _ = v_1.Args[1] 15305 idx := v_1.Args[0] 15306 if idx != v_1.Args[1] { 15307 break 15308 } 15309 val := v.Args[2] 15310 mem := v.Args[3] 15311 v.reset(OpARM64MOVHstoreidx2) 15312 v.AddArg(ptr) 15313 v.AddArg(idx) 15314 v.AddArg(val) 15315 v.AddArg(mem) 15316 return true 15317 } 15318 // match: (MOVHstoreidx (SLLconst [1] idx) ptr val mem) 15319 // cond: 15320 // result: (MOVHstoreidx2 ptr idx val mem) 15321 for { 15322 _ = v.Args[3] 15323 v_0 := v.Args[0] 15324 if v_0.Op != OpARM64SLLconst { 15325 break 15326 } 15327 if v_0.AuxInt != 1 { 15328 break 15329 } 15330 idx := v_0.Args[0] 15331 ptr := v.Args[1] 15332 val := v.Args[2] 15333 mem := v.Args[3] 15334 v.reset(OpARM64MOVHstoreidx2) 15335 v.AddArg(ptr) 15336 v.AddArg(idx) 15337 v.AddArg(val) 15338 v.AddArg(mem) 15339 return true 15340 } 15341 // match: (MOVHstoreidx (ADD idx idx) ptr val mem) 15342 // cond: 15343 // result: (MOVHstoreidx2 ptr idx val mem) 15344 for { 15345 _ = v.Args[3] 15346 v_0 := v.Args[0] 15347 if v_0.Op != OpARM64ADD { 15348 break 15349 } 15350 _ = v_0.Args[1] 15351 idx := v_0.Args[0] 15352 if idx != v_0.Args[1] { 15353 break 15354 } 15355 ptr := v.Args[1] 15356 val := v.Args[2] 15357 mem := v.Args[3] 15358 v.reset(OpARM64MOVHstoreidx2) 15359 v.AddArg(ptr) 15360 v.AddArg(idx) 15361 v.AddArg(val) 15362 v.AddArg(mem) 15363 return true 15364 } 15365 // match: (MOVHstoreidx ptr idx (MOVDconst [0]) mem) 15366 // cond: 15367 // result: (MOVHstorezeroidx ptr idx mem) 15368 for { 15369 _ = v.Args[3] 15370 ptr := v.Args[0] 15371 idx := v.Args[1] 15372 v_2 := v.Args[2] 15373 if v_2.Op != OpARM64MOVDconst { 15374 break 15375 } 15376 if v_2.AuxInt != 0 { 15377 break 15378 } 15379 mem := v.Args[3] 15380 v.reset(OpARM64MOVHstorezeroidx) 15381 v.AddArg(ptr) 15382 v.AddArg(idx) 15383 v.AddArg(mem) 15384 return true 15385 } 15386 // match: (MOVHstoreidx ptr idx (MOVHreg x) mem) 15387 // cond: 
15388 // result: (MOVHstoreidx ptr idx x mem) 15389 for { 15390 _ = v.Args[3] 15391 ptr := v.Args[0] 15392 idx := v.Args[1] 15393 v_2 := v.Args[2] 15394 if v_2.Op != OpARM64MOVHreg { 15395 break 15396 } 15397 x := v_2.Args[0] 15398 mem := v.Args[3] 15399 v.reset(OpARM64MOVHstoreidx) 15400 v.AddArg(ptr) 15401 v.AddArg(idx) 15402 v.AddArg(x) 15403 v.AddArg(mem) 15404 return true 15405 } 15406 // match: (MOVHstoreidx ptr idx (MOVHUreg x) mem) 15407 // cond: 15408 // result: (MOVHstoreidx ptr idx x mem) 15409 for { 15410 _ = v.Args[3] 15411 ptr := v.Args[0] 15412 idx := v.Args[1] 15413 v_2 := v.Args[2] 15414 if v_2.Op != OpARM64MOVHUreg { 15415 break 15416 } 15417 x := v_2.Args[0] 15418 mem := v.Args[3] 15419 v.reset(OpARM64MOVHstoreidx) 15420 v.AddArg(ptr) 15421 v.AddArg(idx) 15422 v.AddArg(x) 15423 v.AddArg(mem) 15424 return true 15425 } 15426 // match: (MOVHstoreidx ptr idx (MOVWreg x) mem) 15427 // cond: 15428 // result: (MOVHstoreidx ptr idx x mem) 15429 for { 15430 _ = v.Args[3] 15431 ptr := v.Args[0] 15432 idx := v.Args[1] 15433 v_2 := v.Args[2] 15434 if v_2.Op != OpARM64MOVWreg { 15435 break 15436 } 15437 x := v_2.Args[0] 15438 mem := v.Args[3] 15439 v.reset(OpARM64MOVHstoreidx) 15440 v.AddArg(ptr) 15441 v.AddArg(idx) 15442 v.AddArg(x) 15443 v.AddArg(mem) 15444 return true 15445 } 15446 return false 15447 } 15448 func rewriteValueARM64_OpARM64MOVHstoreidx_10(v *Value) bool { 15449 // match: (MOVHstoreidx ptr idx (MOVWUreg x) mem) 15450 // cond: 15451 // result: (MOVHstoreidx ptr idx x mem) 15452 for { 15453 _ = v.Args[3] 15454 ptr := v.Args[0] 15455 idx := v.Args[1] 15456 v_2 := v.Args[2] 15457 if v_2.Op != OpARM64MOVWUreg { 15458 break 15459 } 15460 x := v_2.Args[0] 15461 mem := v.Args[3] 15462 v.reset(OpARM64MOVHstoreidx) 15463 v.AddArg(ptr) 15464 v.AddArg(idx) 15465 v.AddArg(x) 15466 v.AddArg(mem) 15467 return true 15468 } 15469 // match: (MOVHstoreidx ptr (ADDconst [2] idx) (SRLconst [16] w) x:(MOVHstoreidx ptr idx w mem)) 15470 // cond: x.Uses == 1 && 
clobber(x) 15471 // result: (MOVWstoreidx ptr idx w mem) 15472 for { 15473 _ = v.Args[3] 15474 ptr := v.Args[0] 15475 v_1 := v.Args[1] 15476 if v_1.Op != OpARM64ADDconst { 15477 break 15478 } 15479 if v_1.AuxInt != 2 { 15480 break 15481 } 15482 idx := v_1.Args[0] 15483 v_2 := v.Args[2] 15484 if v_2.Op != OpARM64SRLconst { 15485 break 15486 } 15487 if v_2.AuxInt != 16 { 15488 break 15489 } 15490 w := v_2.Args[0] 15491 x := v.Args[3] 15492 if x.Op != OpARM64MOVHstoreidx { 15493 break 15494 } 15495 _ = x.Args[3] 15496 if ptr != x.Args[0] { 15497 break 15498 } 15499 if idx != x.Args[1] { 15500 break 15501 } 15502 if w != x.Args[2] { 15503 break 15504 } 15505 mem := x.Args[3] 15506 if !(x.Uses == 1 && clobber(x)) { 15507 break 15508 } 15509 v.reset(OpARM64MOVWstoreidx) 15510 v.AddArg(ptr) 15511 v.AddArg(idx) 15512 v.AddArg(w) 15513 v.AddArg(mem) 15514 return true 15515 } 15516 return false 15517 } 15518 func rewriteValueARM64_OpARM64MOVHstoreidx2_0(v *Value) bool { 15519 // match: (MOVHstoreidx2 ptr (MOVDconst [c]) val mem) 15520 // cond: 15521 // result: (MOVHstore [c<<1] ptr val mem) 15522 for { 15523 _ = v.Args[3] 15524 ptr := v.Args[0] 15525 v_1 := v.Args[1] 15526 if v_1.Op != OpARM64MOVDconst { 15527 break 15528 } 15529 c := v_1.AuxInt 15530 val := v.Args[2] 15531 mem := v.Args[3] 15532 v.reset(OpARM64MOVHstore) 15533 v.AuxInt = c << 1 15534 v.AddArg(ptr) 15535 v.AddArg(val) 15536 v.AddArg(mem) 15537 return true 15538 } 15539 // match: (MOVHstoreidx2 ptr idx (MOVDconst [0]) mem) 15540 // cond: 15541 // result: (MOVHstorezeroidx2 ptr idx mem) 15542 for { 15543 _ = v.Args[3] 15544 ptr := v.Args[0] 15545 idx := v.Args[1] 15546 v_2 := v.Args[2] 15547 if v_2.Op != OpARM64MOVDconst { 15548 break 15549 } 15550 if v_2.AuxInt != 0 { 15551 break 15552 } 15553 mem := v.Args[3] 15554 v.reset(OpARM64MOVHstorezeroidx2) 15555 v.AddArg(ptr) 15556 v.AddArg(idx) 15557 v.AddArg(mem) 15558 return true 15559 } 15560 // match: (MOVHstoreidx2 ptr idx (MOVHreg x) mem) 15561 // cond: 15562 
// result: (MOVHstoreidx2 ptr idx x mem) 15563 for { 15564 _ = v.Args[3] 15565 ptr := v.Args[0] 15566 idx := v.Args[1] 15567 v_2 := v.Args[2] 15568 if v_2.Op != OpARM64MOVHreg { 15569 break 15570 } 15571 x := v_2.Args[0] 15572 mem := v.Args[3] 15573 v.reset(OpARM64MOVHstoreidx2) 15574 v.AddArg(ptr) 15575 v.AddArg(idx) 15576 v.AddArg(x) 15577 v.AddArg(mem) 15578 return true 15579 } 15580 // match: (MOVHstoreidx2 ptr idx (MOVHUreg x) mem) 15581 // cond: 15582 // result: (MOVHstoreidx2 ptr idx x mem) 15583 for { 15584 _ = v.Args[3] 15585 ptr := v.Args[0] 15586 idx := v.Args[1] 15587 v_2 := v.Args[2] 15588 if v_2.Op != OpARM64MOVHUreg { 15589 break 15590 } 15591 x := v_2.Args[0] 15592 mem := v.Args[3] 15593 v.reset(OpARM64MOVHstoreidx2) 15594 v.AddArg(ptr) 15595 v.AddArg(idx) 15596 v.AddArg(x) 15597 v.AddArg(mem) 15598 return true 15599 } 15600 // match: (MOVHstoreidx2 ptr idx (MOVWreg x) mem) 15601 // cond: 15602 // result: (MOVHstoreidx2 ptr idx x mem) 15603 for { 15604 _ = v.Args[3] 15605 ptr := v.Args[0] 15606 idx := v.Args[1] 15607 v_2 := v.Args[2] 15608 if v_2.Op != OpARM64MOVWreg { 15609 break 15610 } 15611 x := v_2.Args[0] 15612 mem := v.Args[3] 15613 v.reset(OpARM64MOVHstoreidx2) 15614 v.AddArg(ptr) 15615 v.AddArg(idx) 15616 v.AddArg(x) 15617 v.AddArg(mem) 15618 return true 15619 } 15620 // match: (MOVHstoreidx2 ptr idx (MOVWUreg x) mem) 15621 // cond: 15622 // result: (MOVHstoreidx2 ptr idx x mem) 15623 for { 15624 _ = v.Args[3] 15625 ptr := v.Args[0] 15626 idx := v.Args[1] 15627 v_2 := v.Args[2] 15628 if v_2.Op != OpARM64MOVWUreg { 15629 break 15630 } 15631 x := v_2.Args[0] 15632 mem := v.Args[3] 15633 v.reset(OpARM64MOVHstoreidx2) 15634 v.AddArg(ptr) 15635 v.AddArg(idx) 15636 v.AddArg(x) 15637 v.AddArg(mem) 15638 return true 15639 } 15640 return false 15641 } 15642 func rewriteValueARM64_OpARM64MOVHstorezero_0(v *Value) bool { 15643 b := v.Block 15644 _ = b 15645 config := b.Func.Config 15646 _ = config 15647 // match: (MOVHstorezero [off1] {sym} (ADDconst 
[off2] ptr) mem) 15648 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15649 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 15650 for { 15651 off1 := v.AuxInt 15652 sym := v.Aux 15653 _ = v.Args[1] 15654 v_0 := v.Args[0] 15655 if v_0.Op != OpARM64ADDconst { 15656 break 15657 } 15658 off2 := v_0.AuxInt 15659 ptr := v_0.Args[0] 15660 mem := v.Args[1] 15661 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15662 break 15663 } 15664 v.reset(OpARM64MOVHstorezero) 15665 v.AuxInt = off1 + off2 15666 v.Aux = sym 15667 v.AddArg(ptr) 15668 v.AddArg(mem) 15669 return true 15670 } 15671 // match: (MOVHstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 15672 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 15673 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 15674 for { 15675 off1 := v.AuxInt 15676 sym1 := v.Aux 15677 _ = v.Args[1] 15678 v_0 := v.Args[0] 15679 if v_0.Op != OpARM64MOVDaddr { 15680 break 15681 } 15682 off2 := v_0.AuxInt 15683 sym2 := v_0.Aux 15684 ptr := v_0.Args[0] 15685 mem := v.Args[1] 15686 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 15687 break 15688 } 15689 v.reset(OpARM64MOVHstorezero) 15690 v.AuxInt = off1 + off2 15691 v.Aux = mergeSym(sym1, sym2) 15692 v.AddArg(ptr) 15693 v.AddArg(mem) 15694 return true 15695 } 15696 // match: (MOVHstorezero [off] {sym} (ADD ptr idx) mem) 15697 // cond: off == 0 && sym == nil 15698 // result: (MOVHstorezeroidx ptr idx mem) 15699 for { 15700 off := v.AuxInt 15701 sym := v.Aux 15702 _ = v.Args[1] 15703 v_0 := v.Args[0] 15704 if v_0.Op != OpARM64ADD { 15705 break 15706 } 15707 _ = v_0.Args[1] 15708 ptr := v_0.Args[0] 15709 idx := v_0.Args[1] 15710 mem := v.Args[1] 15711 if !(off == 0 && sym == nil) { 15712 break 15713 } 15714 v.reset(OpARM64MOVHstorezeroidx) 15715 v.AddArg(ptr) 15716 v.AddArg(idx) 15717 v.AddArg(mem) 15718 
return true 15719 } 15720 // match: (MOVHstorezero [off] {sym} (ADDshiftLL [1] ptr idx) mem) 15721 // cond: off == 0 && sym == nil 15722 // result: (MOVHstorezeroidx2 ptr idx mem) 15723 for { 15724 off := v.AuxInt 15725 sym := v.Aux 15726 _ = v.Args[1] 15727 v_0 := v.Args[0] 15728 if v_0.Op != OpARM64ADDshiftLL { 15729 break 15730 } 15731 if v_0.AuxInt != 1 { 15732 break 15733 } 15734 _ = v_0.Args[1] 15735 ptr := v_0.Args[0] 15736 idx := v_0.Args[1] 15737 mem := v.Args[1] 15738 if !(off == 0 && sym == nil) { 15739 break 15740 } 15741 v.reset(OpARM64MOVHstorezeroidx2) 15742 v.AddArg(ptr) 15743 v.AddArg(idx) 15744 v.AddArg(mem) 15745 return true 15746 } 15747 // match: (MOVHstorezero [i] {s} ptr0 x:(MOVHstorezero [j] {s} ptr1 mem)) 15748 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,2) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 15749 // result: (MOVWstorezero [min(i,j)] {s} ptr0 mem) 15750 for { 15751 i := v.AuxInt 15752 s := v.Aux 15753 _ = v.Args[1] 15754 ptr0 := v.Args[0] 15755 x := v.Args[1] 15756 if x.Op != OpARM64MOVHstorezero { 15757 break 15758 } 15759 j := x.AuxInt 15760 if x.Aux != s { 15761 break 15762 } 15763 _ = x.Args[1] 15764 ptr1 := x.Args[0] 15765 mem := x.Args[1] 15766 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 2) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 15767 break 15768 } 15769 v.reset(OpARM64MOVWstorezero) 15770 v.AuxInt = min(i, j) 15771 v.Aux = s 15772 v.AddArg(ptr0) 15773 v.AddArg(mem) 15774 return true 15775 } 15776 // match: (MOVHstorezero [2] {s} (ADD ptr0 idx0) x:(MOVHstorezeroidx ptr1 idx1 mem)) 15777 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 15778 // result: (MOVWstorezeroidx ptr1 idx1 mem) 15779 for { 15780 if v.AuxInt != 2 { 15781 break 15782 } 15783 s := v.Aux 15784 _ = v.Args[1] 15785 v_0 := v.Args[0] 15786 if v_0.Op != OpARM64ADD { 15787 break 15788 } 15789 _ = v_0.Args[1] 15790 ptr0 
:= v_0.Args[0] 15791 idx0 := v_0.Args[1] 15792 x := v.Args[1] 15793 if x.Op != OpARM64MOVHstorezeroidx { 15794 break 15795 } 15796 _ = x.Args[2] 15797 ptr1 := x.Args[0] 15798 idx1 := x.Args[1] 15799 mem := x.Args[2] 15800 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 15801 break 15802 } 15803 v.reset(OpARM64MOVWstorezeroidx) 15804 v.AddArg(ptr1) 15805 v.AddArg(idx1) 15806 v.AddArg(mem) 15807 return true 15808 } 15809 // match: (MOVHstorezero [2] {s} (ADDshiftLL [1] ptr0 idx0) x:(MOVHstorezeroidx2 ptr1 idx1 mem)) 15810 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 15811 // result: (MOVWstorezeroidx ptr1 (SLLconst <idx1.Type> [1] idx1) mem) 15812 for { 15813 if v.AuxInt != 2 { 15814 break 15815 } 15816 s := v.Aux 15817 _ = v.Args[1] 15818 v_0 := v.Args[0] 15819 if v_0.Op != OpARM64ADDshiftLL { 15820 break 15821 } 15822 if v_0.AuxInt != 1 { 15823 break 15824 } 15825 _ = v_0.Args[1] 15826 ptr0 := v_0.Args[0] 15827 idx0 := v_0.Args[1] 15828 x := v.Args[1] 15829 if x.Op != OpARM64MOVHstorezeroidx2 { 15830 break 15831 } 15832 _ = x.Args[2] 15833 ptr1 := x.Args[0] 15834 idx1 := x.Args[1] 15835 mem := x.Args[2] 15836 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 15837 break 15838 } 15839 v.reset(OpARM64MOVWstorezeroidx) 15840 v.AddArg(ptr1) 15841 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 15842 v0.AuxInt = 1 15843 v0.AddArg(idx1) 15844 v.AddArg(v0) 15845 v.AddArg(mem) 15846 return true 15847 } 15848 return false 15849 } 15850 func rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v *Value) bool { 15851 // match: (MOVHstorezeroidx ptr (MOVDconst [c]) mem) 15852 // cond: 15853 // result: (MOVHstorezero [c] ptr mem) 15854 for { 15855 _ = v.Args[2] 15856 ptr := v.Args[0] 15857 v_1 := v.Args[1] 15858 if v_1.Op != OpARM64MOVDconst { 15859 break 15860 } 15861 c := v_1.AuxInt 
15862 mem := v.Args[2] 15863 v.reset(OpARM64MOVHstorezero) 15864 v.AuxInt = c 15865 v.AddArg(ptr) 15866 v.AddArg(mem) 15867 return true 15868 } 15869 // match: (MOVHstorezeroidx (MOVDconst [c]) idx mem) 15870 // cond: 15871 // result: (MOVHstorezero [c] idx mem) 15872 for { 15873 _ = v.Args[2] 15874 v_0 := v.Args[0] 15875 if v_0.Op != OpARM64MOVDconst { 15876 break 15877 } 15878 c := v_0.AuxInt 15879 idx := v.Args[1] 15880 mem := v.Args[2] 15881 v.reset(OpARM64MOVHstorezero) 15882 v.AuxInt = c 15883 v.AddArg(idx) 15884 v.AddArg(mem) 15885 return true 15886 } 15887 // match: (MOVHstorezeroidx ptr (SLLconst [1] idx) mem) 15888 // cond: 15889 // result: (MOVHstorezeroidx2 ptr idx mem) 15890 for { 15891 _ = v.Args[2] 15892 ptr := v.Args[0] 15893 v_1 := v.Args[1] 15894 if v_1.Op != OpARM64SLLconst { 15895 break 15896 } 15897 if v_1.AuxInt != 1 { 15898 break 15899 } 15900 idx := v_1.Args[0] 15901 mem := v.Args[2] 15902 v.reset(OpARM64MOVHstorezeroidx2) 15903 v.AddArg(ptr) 15904 v.AddArg(idx) 15905 v.AddArg(mem) 15906 return true 15907 } 15908 // match: (MOVHstorezeroidx ptr (ADD idx idx) mem) 15909 // cond: 15910 // result: (MOVHstorezeroidx2 ptr idx mem) 15911 for { 15912 _ = v.Args[2] 15913 ptr := v.Args[0] 15914 v_1 := v.Args[1] 15915 if v_1.Op != OpARM64ADD { 15916 break 15917 } 15918 _ = v_1.Args[1] 15919 idx := v_1.Args[0] 15920 if idx != v_1.Args[1] { 15921 break 15922 } 15923 mem := v.Args[2] 15924 v.reset(OpARM64MOVHstorezeroidx2) 15925 v.AddArg(ptr) 15926 v.AddArg(idx) 15927 v.AddArg(mem) 15928 return true 15929 } 15930 // match: (MOVHstorezeroidx (SLLconst [1] idx) ptr mem) 15931 // cond: 15932 // result: (MOVHstorezeroidx2 ptr idx mem) 15933 for { 15934 _ = v.Args[2] 15935 v_0 := v.Args[0] 15936 if v_0.Op != OpARM64SLLconst { 15937 break 15938 } 15939 if v_0.AuxInt != 1 { 15940 break 15941 } 15942 idx := v_0.Args[0] 15943 ptr := v.Args[1] 15944 mem := v.Args[2] 15945 v.reset(OpARM64MOVHstorezeroidx2) 15946 v.AddArg(ptr) 15947 v.AddArg(idx) 15948 v.AddArg(mem) 
15949 return true 15950 } 15951 // match: (MOVHstorezeroidx (ADD idx idx) ptr mem) 15952 // cond: 15953 // result: (MOVHstorezeroidx2 ptr idx mem) 15954 for { 15955 _ = v.Args[2] 15956 v_0 := v.Args[0] 15957 if v_0.Op != OpARM64ADD { 15958 break 15959 } 15960 _ = v_0.Args[1] 15961 idx := v_0.Args[0] 15962 if idx != v_0.Args[1] { 15963 break 15964 } 15965 ptr := v.Args[1] 15966 mem := v.Args[2] 15967 v.reset(OpARM64MOVHstorezeroidx2) 15968 v.AddArg(ptr) 15969 v.AddArg(idx) 15970 v.AddArg(mem) 15971 return true 15972 } 15973 // match: (MOVHstorezeroidx ptr (ADDconst [2] idx) x:(MOVHstorezeroidx ptr idx mem)) 15974 // cond: x.Uses == 1 && clobber(x) 15975 // result: (MOVWstorezeroidx ptr idx mem) 15976 for { 15977 _ = v.Args[2] 15978 ptr := v.Args[0] 15979 v_1 := v.Args[1] 15980 if v_1.Op != OpARM64ADDconst { 15981 break 15982 } 15983 if v_1.AuxInt != 2 { 15984 break 15985 } 15986 idx := v_1.Args[0] 15987 x := v.Args[2] 15988 if x.Op != OpARM64MOVHstorezeroidx { 15989 break 15990 } 15991 _ = x.Args[2] 15992 if ptr != x.Args[0] { 15993 break 15994 } 15995 if idx != x.Args[1] { 15996 break 15997 } 15998 mem := x.Args[2] 15999 if !(x.Uses == 1 && clobber(x)) { 16000 break 16001 } 16002 v.reset(OpARM64MOVWstorezeroidx) 16003 v.AddArg(ptr) 16004 v.AddArg(idx) 16005 v.AddArg(mem) 16006 return true 16007 } 16008 return false 16009 } 16010 func rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v *Value) bool { 16011 // match: (MOVHstorezeroidx2 ptr (MOVDconst [c]) mem) 16012 // cond: 16013 // result: (MOVHstorezero [c<<1] ptr mem) 16014 for { 16015 _ = v.Args[2] 16016 ptr := v.Args[0] 16017 v_1 := v.Args[1] 16018 if v_1.Op != OpARM64MOVDconst { 16019 break 16020 } 16021 c := v_1.AuxInt 16022 mem := v.Args[2] 16023 v.reset(OpARM64MOVHstorezero) 16024 v.AuxInt = c << 1 16025 v.AddArg(ptr) 16026 v.AddArg(mem) 16027 return true 16028 } 16029 return false 16030 } 16031 func rewriteValueARM64_OpARM64MOVQstorezero_0(v *Value) bool { 16032 b := v.Block 16033 _ = b 16034 config := 
b.Func.Config 16035 _ = config 16036 // match: (MOVQstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 16037 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16038 // result: (MOVQstorezero [off1+off2] {sym} ptr mem) 16039 for { 16040 off1 := v.AuxInt 16041 sym := v.Aux 16042 _ = v.Args[1] 16043 v_0 := v.Args[0] 16044 if v_0.Op != OpARM64ADDconst { 16045 break 16046 } 16047 off2 := v_0.AuxInt 16048 ptr := v_0.Args[0] 16049 mem := v.Args[1] 16050 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16051 break 16052 } 16053 v.reset(OpARM64MOVQstorezero) 16054 v.AuxInt = off1 + off2 16055 v.Aux = sym 16056 v.AddArg(ptr) 16057 v.AddArg(mem) 16058 return true 16059 } 16060 // match: (MOVQstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 16061 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16062 // result: (MOVQstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 16063 for { 16064 off1 := v.AuxInt 16065 sym1 := v.Aux 16066 _ = v.Args[1] 16067 v_0 := v.Args[0] 16068 if v_0.Op != OpARM64MOVDaddr { 16069 break 16070 } 16071 off2 := v_0.AuxInt 16072 sym2 := v_0.Aux 16073 ptr := v_0.Args[0] 16074 mem := v.Args[1] 16075 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16076 break 16077 } 16078 v.reset(OpARM64MOVQstorezero) 16079 v.AuxInt = off1 + off2 16080 v.Aux = mergeSym(sym1, sym2) 16081 v.AddArg(ptr) 16082 v.AddArg(mem) 16083 return true 16084 } 16085 return false 16086 } 16087 func rewriteValueARM64_OpARM64MOVWUload_0(v *Value) bool { 16088 b := v.Block 16089 _ = b 16090 config := b.Func.Config 16091 _ = config 16092 // match: (MOVWUload [off] {sym} ptr (FMOVSstore [off] {sym} ptr val _)) 16093 // cond: 16094 // result: (FMOVSfpgp val) 16095 for { 16096 off := v.AuxInt 16097 sym := v.Aux 16098 _ = v.Args[1] 16099 ptr := v.Args[0] 16100 v_1 := v.Args[1] 16101 if v_1.Op != OpARM64FMOVSstore { 16102 break 16103 
} 16104 if v_1.AuxInt != off { 16105 break 16106 } 16107 if v_1.Aux != sym { 16108 break 16109 } 16110 _ = v_1.Args[2] 16111 if ptr != v_1.Args[0] { 16112 break 16113 } 16114 val := v_1.Args[1] 16115 v.reset(OpARM64FMOVSfpgp) 16116 v.AddArg(val) 16117 return true 16118 } 16119 // match: (MOVWUload [off1] {sym} (ADDconst [off2] ptr) mem) 16120 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16121 // result: (MOVWUload [off1+off2] {sym} ptr mem) 16122 for { 16123 off1 := v.AuxInt 16124 sym := v.Aux 16125 _ = v.Args[1] 16126 v_0 := v.Args[0] 16127 if v_0.Op != OpARM64ADDconst { 16128 break 16129 } 16130 off2 := v_0.AuxInt 16131 ptr := v_0.Args[0] 16132 mem := v.Args[1] 16133 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16134 break 16135 } 16136 v.reset(OpARM64MOVWUload) 16137 v.AuxInt = off1 + off2 16138 v.Aux = sym 16139 v.AddArg(ptr) 16140 v.AddArg(mem) 16141 return true 16142 } 16143 // match: (MOVWUload [off] {sym} (ADD ptr idx) mem) 16144 // cond: off == 0 && sym == nil 16145 // result: (MOVWUloadidx ptr idx mem) 16146 for { 16147 off := v.AuxInt 16148 sym := v.Aux 16149 _ = v.Args[1] 16150 v_0 := v.Args[0] 16151 if v_0.Op != OpARM64ADD { 16152 break 16153 } 16154 _ = v_0.Args[1] 16155 ptr := v_0.Args[0] 16156 idx := v_0.Args[1] 16157 mem := v.Args[1] 16158 if !(off == 0 && sym == nil) { 16159 break 16160 } 16161 v.reset(OpARM64MOVWUloadidx) 16162 v.AddArg(ptr) 16163 v.AddArg(idx) 16164 v.AddArg(mem) 16165 return true 16166 } 16167 // match: (MOVWUload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 16168 // cond: off == 0 && sym == nil 16169 // result: (MOVWUloadidx4 ptr idx mem) 16170 for { 16171 off := v.AuxInt 16172 sym := v.Aux 16173 _ = v.Args[1] 16174 v_0 := v.Args[0] 16175 if v_0.Op != OpARM64ADDshiftLL { 16176 break 16177 } 16178 if v_0.AuxInt != 2 { 16179 break 16180 } 16181 _ = v_0.Args[1] 16182 ptr := v_0.Args[0] 16183 idx := v_0.Args[1] 16184 mem := v.Args[1] 16185 if !(off == 0 && sym == nil) { 
16186 break 16187 } 16188 v.reset(OpARM64MOVWUloadidx4) 16189 v.AddArg(ptr) 16190 v.AddArg(idx) 16191 v.AddArg(mem) 16192 return true 16193 } 16194 // match: (MOVWUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 16195 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16196 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 16197 for { 16198 off1 := v.AuxInt 16199 sym1 := v.Aux 16200 _ = v.Args[1] 16201 v_0 := v.Args[0] 16202 if v_0.Op != OpARM64MOVDaddr { 16203 break 16204 } 16205 off2 := v_0.AuxInt 16206 sym2 := v_0.Aux 16207 ptr := v_0.Args[0] 16208 mem := v.Args[1] 16209 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16210 break 16211 } 16212 v.reset(OpARM64MOVWUload) 16213 v.AuxInt = off1 + off2 16214 v.Aux = mergeSym(sym1, sym2) 16215 v.AddArg(ptr) 16216 v.AddArg(mem) 16217 return true 16218 } 16219 // match: (MOVWUload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 16220 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 16221 // result: (MOVDconst [0]) 16222 for { 16223 off := v.AuxInt 16224 sym := v.Aux 16225 _ = v.Args[1] 16226 ptr := v.Args[0] 16227 v_1 := v.Args[1] 16228 if v_1.Op != OpARM64MOVWstorezero { 16229 break 16230 } 16231 off2 := v_1.AuxInt 16232 sym2 := v_1.Aux 16233 _ = v_1.Args[1] 16234 ptr2 := v_1.Args[0] 16235 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 16236 break 16237 } 16238 v.reset(OpARM64MOVDconst) 16239 v.AuxInt = 0 16240 return true 16241 } 16242 // match: (MOVWUload [off] {sym} (SB) _) 16243 // cond: symIsRO(sym) 16244 // result: (MOVDconst [int64(read32(sym, off, config.BigEndian))]) 16245 for { 16246 off := v.AuxInt 16247 sym := v.Aux 16248 _ = v.Args[1] 16249 v_0 := v.Args[0] 16250 if v_0.Op != OpSB { 16251 break 16252 } 16253 if !(symIsRO(sym)) { 16254 break 16255 } 16256 v.reset(OpARM64MOVDconst) 16257 v.AuxInt = int64(read32(sym, off, config.BigEndian)) 16258 return true 
16259 } 16260 return false 16261 } 16262 func rewriteValueARM64_OpARM64MOVWUloadidx_0(v *Value) bool { 16263 // match: (MOVWUloadidx ptr (MOVDconst [c]) mem) 16264 // cond: 16265 // result: (MOVWUload [c] ptr mem) 16266 for { 16267 _ = v.Args[2] 16268 ptr := v.Args[0] 16269 v_1 := v.Args[1] 16270 if v_1.Op != OpARM64MOVDconst { 16271 break 16272 } 16273 c := v_1.AuxInt 16274 mem := v.Args[2] 16275 v.reset(OpARM64MOVWUload) 16276 v.AuxInt = c 16277 v.AddArg(ptr) 16278 v.AddArg(mem) 16279 return true 16280 } 16281 // match: (MOVWUloadidx (MOVDconst [c]) ptr mem) 16282 // cond: 16283 // result: (MOVWUload [c] ptr mem) 16284 for { 16285 _ = v.Args[2] 16286 v_0 := v.Args[0] 16287 if v_0.Op != OpARM64MOVDconst { 16288 break 16289 } 16290 c := v_0.AuxInt 16291 ptr := v.Args[1] 16292 mem := v.Args[2] 16293 v.reset(OpARM64MOVWUload) 16294 v.AuxInt = c 16295 v.AddArg(ptr) 16296 v.AddArg(mem) 16297 return true 16298 } 16299 // match: (MOVWUloadidx ptr (SLLconst [2] idx) mem) 16300 // cond: 16301 // result: (MOVWUloadidx4 ptr idx mem) 16302 for { 16303 _ = v.Args[2] 16304 ptr := v.Args[0] 16305 v_1 := v.Args[1] 16306 if v_1.Op != OpARM64SLLconst { 16307 break 16308 } 16309 if v_1.AuxInt != 2 { 16310 break 16311 } 16312 idx := v_1.Args[0] 16313 mem := v.Args[2] 16314 v.reset(OpARM64MOVWUloadidx4) 16315 v.AddArg(ptr) 16316 v.AddArg(idx) 16317 v.AddArg(mem) 16318 return true 16319 } 16320 // match: (MOVWUloadidx (SLLconst [2] idx) ptr mem) 16321 // cond: 16322 // result: (MOVWUloadidx4 ptr idx mem) 16323 for { 16324 _ = v.Args[2] 16325 v_0 := v.Args[0] 16326 if v_0.Op != OpARM64SLLconst { 16327 break 16328 } 16329 if v_0.AuxInt != 2 { 16330 break 16331 } 16332 idx := v_0.Args[0] 16333 ptr := v.Args[1] 16334 mem := v.Args[2] 16335 v.reset(OpARM64MOVWUloadidx4) 16336 v.AddArg(ptr) 16337 v.AddArg(idx) 16338 v.AddArg(mem) 16339 return true 16340 } 16341 // match: (MOVWUloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 16342 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || 
isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 16343 // result: (MOVDconst [0]) 16344 for { 16345 _ = v.Args[2] 16346 ptr := v.Args[0] 16347 idx := v.Args[1] 16348 v_2 := v.Args[2] 16349 if v_2.Op != OpARM64MOVWstorezeroidx { 16350 break 16351 } 16352 _ = v_2.Args[2] 16353 ptr2 := v_2.Args[0] 16354 idx2 := v_2.Args[1] 16355 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) { 16356 break 16357 } 16358 v.reset(OpARM64MOVDconst) 16359 v.AuxInt = 0 16360 return true 16361 } 16362 return false 16363 } 16364 func rewriteValueARM64_OpARM64MOVWUloadidx4_0(v *Value) bool { 16365 // match: (MOVWUloadidx4 ptr (MOVDconst [c]) mem) 16366 // cond: 16367 // result: (MOVWUload [c<<2] ptr mem) 16368 for { 16369 _ = v.Args[2] 16370 ptr := v.Args[0] 16371 v_1 := v.Args[1] 16372 if v_1.Op != OpARM64MOVDconst { 16373 break 16374 } 16375 c := v_1.AuxInt 16376 mem := v.Args[2] 16377 v.reset(OpARM64MOVWUload) 16378 v.AuxInt = c << 2 16379 v.AddArg(ptr) 16380 v.AddArg(mem) 16381 return true 16382 } 16383 // match: (MOVWUloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _)) 16384 // cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) 16385 // result: (MOVDconst [0]) 16386 for { 16387 _ = v.Args[2] 16388 ptr := v.Args[0] 16389 idx := v.Args[1] 16390 v_2 := v.Args[2] 16391 if v_2.Op != OpARM64MOVWstorezeroidx4 { 16392 break 16393 } 16394 _ = v_2.Args[2] 16395 ptr2 := v_2.Args[0] 16396 idx2 := v_2.Args[1] 16397 if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) { 16398 break 16399 } 16400 v.reset(OpARM64MOVDconst) 16401 v.AuxInt = 0 16402 return true 16403 } 16404 return false 16405 } 16406 func rewriteValueARM64_OpARM64MOVWUreg_0(v *Value) bool { 16407 // match: (MOVWUreg x:(MOVBUload _ _)) 16408 // cond: 16409 // result: (MOVDreg x) 16410 for { 16411 x := v.Args[0] 16412 if x.Op != OpARM64MOVBUload { 16413 break 16414 } 16415 _ = x.Args[1] 16416 v.reset(OpARM64MOVDreg) 16417 v.AddArg(x) 16418 return true 16419 } 16420 // match: (MOVWUreg 
x:(MOVHUload _ _)) 16421 // cond: 16422 // result: (MOVDreg x) 16423 for { 16424 x := v.Args[0] 16425 if x.Op != OpARM64MOVHUload { 16426 break 16427 } 16428 _ = x.Args[1] 16429 v.reset(OpARM64MOVDreg) 16430 v.AddArg(x) 16431 return true 16432 } 16433 // match: (MOVWUreg x:(MOVWUload _ _)) 16434 // cond: 16435 // result: (MOVDreg x) 16436 for { 16437 x := v.Args[0] 16438 if x.Op != OpARM64MOVWUload { 16439 break 16440 } 16441 _ = x.Args[1] 16442 v.reset(OpARM64MOVDreg) 16443 v.AddArg(x) 16444 return true 16445 } 16446 // match: (MOVWUreg x:(MOVBUloadidx _ _ _)) 16447 // cond: 16448 // result: (MOVDreg x) 16449 for { 16450 x := v.Args[0] 16451 if x.Op != OpARM64MOVBUloadidx { 16452 break 16453 } 16454 _ = x.Args[2] 16455 v.reset(OpARM64MOVDreg) 16456 v.AddArg(x) 16457 return true 16458 } 16459 // match: (MOVWUreg x:(MOVHUloadidx _ _ _)) 16460 // cond: 16461 // result: (MOVDreg x) 16462 for { 16463 x := v.Args[0] 16464 if x.Op != OpARM64MOVHUloadidx { 16465 break 16466 } 16467 _ = x.Args[2] 16468 v.reset(OpARM64MOVDreg) 16469 v.AddArg(x) 16470 return true 16471 } 16472 // match: (MOVWUreg x:(MOVWUloadidx _ _ _)) 16473 // cond: 16474 // result: (MOVDreg x) 16475 for { 16476 x := v.Args[0] 16477 if x.Op != OpARM64MOVWUloadidx { 16478 break 16479 } 16480 _ = x.Args[2] 16481 v.reset(OpARM64MOVDreg) 16482 v.AddArg(x) 16483 return true 16484 } 16485 // match: (MOVWUreg x:(MOVHUloadidx2 _ _ _)) 16486 // cond: 16487 // result: (MOVDreg x) 16488 for { 16489 x := v.Args[0] 16490 if x.Op != OpARM64MOVHUloadidx2 { 16491 break 16492 } 16493 _ = x.Args[2] 16494 v.reset(OpARM64MOVDreg) 16495 v.AddArg(x) 16496 return true 16497 } 16498 // match: (MOVWUreg x:(MOVWUloadidx4 _ _ _)) 16499 // cond: 16500 // result: (MOVDreg x) 16501 for { 16502 x := v.Args[0] 16503 if x.Op != OpARM64MOVWUloadidx4 { 16504 break 16505 } 16506 _ = x.Args[2] 16507 v.reset(OpARM64MOVDreg) 16508 v.AddArg(x) 16509 return true 16510 } 16511 // match: (MOVWUreg x:(MOVBUreg _)) 16512 // cond: 16513 // result: 
(MOVDreg x) 16514 for { 16515 x := v.Args[0] 16516 if x.Op != OpARM64MOVBUreg { 16517 break 16518 } 16519 v.reset(OpARM64MOVDreg) 16520 v.AddArg(x) 16521 return true 16522 } 16523 // match: (MOVWUreg x:(MOVHUreg _)) 16524 // cond: 16525 // result: (MOVDreg x) 16526 for { 16527 x := v.Args[0] 16528 if x.Op != OpARM64MOVHUreg { 16529 break 16530 } 16531 v.reset(OpARM64MOVDreg) 16532 v.AddArg(x) 16533 return true 16534 } 16535 return false 16536 } 16537 func rewriteValueARM64_OpARM64MOVWUreg_10(v *Value) bool { 16538 // match: (MOVWUreg x:(MOVWUreg _)) 16539 // cond: 16540 // result: (MOVDreg x) 16541 for { 16542 x := v.Args[0] 16543 if x.Op != OpARM64MOVWUreg { 16544 break 16545 } 16546 v.reset(OpARM64MOVDreg) 16547 v.AddArg(x) 16548 return true 16549 } 16550 // match: (MOVWUreg (ANDconst [c] x)) 16551 // cond: 16552 // result: (ANDconst [c&(1<<32-1)] x) 16553 for { 16554 v_0 := v.Args[0] 16555 if v_0.Op != OpARM64ANDconst { 16556 break 16557 } 16558 c := v_0.AuxInt 16559 x := v_0.Args[0] 16560 v.reset(OpARM64ANDconst) 16561 v.AuxInt = c & (1<<32 - 1) 16562 v.AddArg(x) 16563 return true 16564 } 16565 // match: (MOVWUreg (MOVDconst [c])) 16566 // cond: 16567 // result: (MOVDconst [int64(uint32(c))]) 16568 for { 16569 v_0 := v.Args[0] 16570 if v_0.Op != OpARM64MOVDconst { 16571 break 16572 } 16573 c := v_0.AuxInt 16574 v.reset(OpARM64MOVDconst) 16575 v.AuxInt = int64(uint32(c)) 16576 return true 16577 } 16578 // match: (MOVWUreg (SLLconst [sc] x)) 16579 // cond: isARM64BFMask(sc, 1<<32-1, sc) 16580 // result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x) 16581 for { 16582 v_0 := v.Args[0] 16583 if v_0.Op != OpARM64SLLconst { 16584 break 16585 } 16586 sc := v_0.AuxInt 16587 x := v_0.Args[0] 16588 if !(isARM64BFMask(sc, 1<<32-1, sc)) { 16589 break 16590 } 16591 v.reset(OpARM64UBFIZ) 16592 v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc)) 16593 v.AddArg(x) 16594 return true 16595 } 16596 // match: (MOVWUreg (SRLconst [sc] x)) 16597 // cond: 
isARM64BFMask(sc, 1<<32-1, 0) 16598 // result: (UBFX [arm64BFAuxInt(sc, 32)] x) 16599 for { 16600 v_0 := v.Args[0] 16601 if v_0.Op != OpARM64SRLconst { 16602 break 16603 } 16604 sc := v_0.AuxInt 16605 x := v_0.Args[0] 16606 if !(isARM64BFMask(sc, 1<<32-1, 0)) { 16607 break 16608 } 16609 v.reset(OpARM64UBFX) 16610 v.AuxInt = arm64BFAuxInt(sc, 32) 16611 v.AddArg(x) 16612 return true 16613 } 16614 return false 16615 } 16616 func rewriteValueARM64_OpARM64MOVWload_0(v *Value) bool { 16617 b := v.Block 16618 _ = b 16619 config := b.Func.Config 16620 _ = config 16621 // match: (MOVWload [off1] {sym} (ADDconst [off2] ptr) mem) 16622 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16623 // result: (MOVWload [off1+off2] {sym} ptr mem) 16624 for { 16625 off1 := v.AuxInt 16626 sym := v.Aux 16627 _ = v.Args[1] 16628 v_0 := v.Args[0] 16629 if v_0.Op != OpARM64ADDconst { 16630 break 16631 } 16632 off2 := v_0.AuxInt 16633 ptr := v_0.Args[0] 16634 mem := v.Args[1] 16635 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16636 break 16637 } 16638 v.reset(OpARM64MOVWload) 16639 v.AuxInt = off1 + off2 16640 v.Aux = sym 16641 v.AddArg(ptr) 16642 v.AddArg(mem) 16643 return true 16644 } 16645 // match: (MOVWload [off] {sym} (ADD ptr idx) mem) 16646 // cond: off == 0 && sym == nil 16647 // result: (MOVWloadidx ptr idx mem) 16648 for { 16649 off := v.AuxInt 16650 sym := v.Aux 16651 _ = v.Args[1] 16652 v_0 := v.Args[0] 16653 if v_0.Op != OpARM64ADD { 16654 break 16655 } 16656 _ = v_0.Args[1] 16657 ptr := v_0.Args[0] 16658 idx := v_0.Args[1] 16659 mem := v.Args[1] 16660 if !(off == 0 && sym == nil) { 16661 break 16662 } 16663 v.reset(OpARM64MOVWloadidx) 16664 v.AddArg(ptr) 16665 v.AddArg(idx) 16666 v.AddArg(mem) 16667 return true 16668 } 16669 // match: (MOVWload [off] {sym} (ADDshiftLL [2] ptr idx) mem) 16670 // cond: off == 0 && sym == nil 16671 // result: (MOVWloadidx4 ptr idx mem) 16672 for { 16673 off := v.AuxInt 16674 sym := v.Aux 
16675 _ = v.Args[1] 16676 v_0 := v.Args[0] 16677 if v_0.Op != OpARM64ADDshiftLL { 16678 break 16679 } 16680 if v_0.AuxInt != 2 { 16681 break 16682 } 16683 _ = v_0.Args[1] 16684 ptr := v_0.Args[0] 16685 idx := v_0.Args[1] 16686 mem := v.Args[1] 16687 if !(off == 0 && sym == nil) { 16688 break 16689 } 16690 v.reset(OpARM64MOVWloadidx4) 16691 v.AddArg(ptr) 16692 v.AddArg(idx) 16693 v.AddArg(mem) 16694 return true 16695 } 16696 // match: (MOVWload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 16697 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 16698 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 16699 for { 16700 off1 := v.AuxInt 16701 sym1 := v.Aux 16702 _ = v.Args[1] 16703 v_0 := v.Args[0] 16704 if v_0.Op != OpARM64MOVDaddr { 16705 break 16706 } 16707 off2 := v_0.AuxInt 16708 sym2 := v_0.Aux 16709 ptr := v_0.Args[0] 16710 mem := v.Args[1] 16711 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 16712 break 16713 } 16714 v.reset(OpARM64MOVWload) 16715 v.AuxInt = off1 + off2 16716 v.Aux = mergeSym(sym1, sym2) 16717 v.AddArg(ptr) 16718 v.AddArg(mem) 16719 return true 16720 } 16721 // match: (MOVWload [off] {sym} ptr (MOVWstorezero [off2] {sym2} ptr2 _)) 16722 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 16723 // result: (MOVDconst [0]) 16724 for { 16725 off := v.AuxInt 16726 sym := v.Aux 16727 _ = v.Args[1] 16728 ptr := v.Args[0] 16729 v_1 := v.Args[1] 16730 if v_1.Op != OpARM64MOVWstorezero { 16731 break 16732 } 16733 off2 := v_1.AuxInt 16734 sym2 := v_1.Aux 16735 _ = v_1.Args[1] 16736 ptr2 := v_1.Args[0] 16737 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 16738 break 16739 } 16740 v.reset(OpARM64MOVDconst) 16741 v.AuxInt = 0 16742 return true 16743 } 16744 return false 16745 } 16746 func rewriteValueARM64_OpARM64MOVWloadidx_0(v *Value) bool { 16747 // match: (MOVWloadidx ptr (MOVDconst [c]) mem) 16748 // cond: 16749 
// result: (MOVWload [c] ptr mem) 16750 for { 16751 _ = v.Args[2] 16752 ptr := v.Args[0] 16753 v_1 := v.Args[1] 16754 if v_1.Op != OpARM64MOVDconst { 16755 break 16756 } 16757 c := v_1.AuxInt 16758 mem := v.Args[2] 16759 v.reset(OpARM64MOVWload) 16760 v.AuxInt = c 16761 v.AddArg(ptr) 16762 v.AddArg(mem) 16763 return true 16764 } 16765 // match: (MOVWloadidx (MOVDconst [c]) ptr mem) 16766 // cond: 16767 // result: (MOVWload [c] ptr mem) 16768 for { 16769 _ = v.Args[2] 16770 v_0 := v.Args[0] 16771 if v_0.Op != OpARM64MOVDconst { 16772 break 16773 } 16774 c := v_0.AuxInt 16775 ptr := v.Args[1] 16776 mem := v.Args[2] 16777 v.reset(OpARM64MOVWload) 16778 v.AuxInt = c 16779 v.AddArg(ptr) 16780 v.AddArg(mem) 16781 return true 16782 } 16783 // match: (MOVWloadidx ptr (SLLconst [2] idx) mem) 16784 // cond: 16785 // result: (MOVWloadidx4 ptr idx mem) 16786 for { 16787 _ = v.Args[2] 16788 ptr := v.Args[0] 16789 v_1 := v.Args[1] 16790 if v_1.Op != OpARM64SLLconst { 16791 break 16792 } 16793 if v_1.AuxInt != 2 { 16794 break 16795 } 16796 idx := v_1.Args[0] 16797 mem := v.Args[2] 16798 v.reset(OpARM64MOVWloadidx4) 16799 v.AddArg(ptr) 16800 v.AddArg(idx) 16801 v.AddArg(mem) 16802 return true 16803 } 16804 // match: (MOVWloadidx (SLLconst [2] idx) ptr mem) 16805 // cond: 16806 // result: (MOVWloadidx4 ptr idx mem) 16807 for { 16808 _ = v.Args[2] 16809 v_0 := v.Args[0] 16810 if v_0.Op != OpARM64SLLconst { 16811 break 16812 } 16813 if v_0.AuxInt != 2 { 16814 break 16815 } 16816 idx := v_0.Args[0] 16817 ptr := v.Args[1] 16818 mem := v.Args[2] 16819 v.reset(OpARM64MOVWloadidx4) 16820 v.AddArg(ptr) 16821 v.AddArg(idx) 16822 v.AddArg(mem) 16823 return true 16824 } 16825 // match: (MOVWloadidx ptr idx (MOVWstorezeroidx ptr2 idx2 _)) 16826 // cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) 16827 // result: (MOVDconst [0]) 16828 for { 16829 _ = v.Args[2] 16830 ptr := v.Args[0] 16831 idx := v.Args[1] 16832 v_2 := v.Args[2] 16833 if v_2.Op 
!= OpARM64MOVWstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWloadidx4_0 applies the rewrite rules for
// OpARM64MOVWloadidx4 (word load addressed by ptr + 4*idx) and reports
// whether v was rewritten. Rules are tried in order; the first match wins.
// Code generated from gen/ARM64.rules — fix the rules file, not this one.
func rewriteValueARM64_OpARM64MOVWloadidx4_0(v *Value) bool {
	// A constant index folds into the load's immediate offset (scaled by 4).
	// match: (MOVWloadidx4 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVWload [c<<2] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVWload)
		v.AuxInt = c << 2
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// A load whose memory argument is a zeroing store of the same
	// ptr/idx pair is known to read zero.
	// match: (MOVWloadidx4 ptr idx (MOVWstorezeroidx4 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVWstorezeroidx4 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWreg_0 applies the first batch of rewrite
// rules for OpARM64MOVWreg: a MOVWreg whose argument is one of the listed
// narrow load ops is replaced by a plain MOVDreg of that load. Reports
// whether v was rewritten. Generated from gen/ARM64.rules.
func rewriteValueARM64_OpARM64MOVWreg_0(v *Value) bool {
	// match: (MOVWreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpARM64MOVDreg)
v.AddArg(x) 16915 return true 16916 } 16917 // match: (MOVWreg x:(MOVHload _ _)) 16918 // cond: 16919 // result: (MOVDreg x) 16920 for { 16921 x := v.Args[0] 16922 if x.Op != OpARM64MOVHload { 16923 break 16924 } 16925 _ = x.Args[1] 16926 v.reset(OpARM64MOVDreg) 16927 v.AddArg(x) 16928 return true 16929 } 16930 // match: (MOVWreg x:(MOVHUload _ _)) 16931 // cond: 16932 // result: (MOVDreg x) 16933 for { 16934 x := v.Args[0] 16935 if x.Op != OpARM64MOVHUload { 16936 break 16937 } 16938 _ = x.Args[1] 16939 v.reset(OpARM64MOVDreg) 16940 v.AddArg(x) 16941 return true 16942 } 16943 // match: (MOVWreg x:(MOVWload _ _)) 16944 // cond: 16945 // result: (MOVDreg x) 16946 for { 16947 x := v.Args[0] 16948 if x.Op != OpARM64MOVWload { 16949 break 16950 } 16951 _ = x.Args[1] 16952 v.reset(OpARM64MOVDreg) 16953 v.AddArg(x) 16954 return true 16955 } 16956 // match: (MOVWreg x:(MOVBloadidx _ _ _)) 16957 // cond: 16958 // result: (MOVDreg x) 16959 for { 16960 x := v.Args[0] 16961 if x.Op != OpARM64MOVBloadidx { 16962 break 16963 } 16964 _ = x.Args[2] 16965 v.reset(OpARM64MOVDreg) 16966 v.AddArg(x) 16967 return true 16968 } 16969 // match: (MOVWreg x:(MOVBUloadidx _ _ _)) 16970 // cond: 16971 // result: (MOVDreg x) 16972 for { 16973 x := v.Args[0] 16974 if x.Op != OpARM64MOVBUloadidx { 16975 break 16976 } 16977 _ = x.Args[2] 16978 v.reset(OpARM64MOVDreg) 16979 v.AddArg(x) 16980 return true 16981 } 16982 // match: (MOVWreg x:(MOVHloadidx _ _ _)) 16983 // cond: 16984 // result: (MOVDreg x) 16985 for { 16986 x := v.Args[0] 16987 if x.Op != OpARM64MOVHloadidx { 16988 break 16989 } 16990 _ = x.Args[2] 16991 v.reset(OpARM64MOVDreg) 16992 v.AddArg(x) 16993 return true 16994 } 16995 // match: (MOVWreg x:(MOVHUloadidx _ _ _)) 16996 // cond: 16997 // result: (MOVDreg x) 16998 for { 16999 x := v.Args[0] 17000 if x.Op != OpARM64MOVHUloadidx { 17001 break 17002 } 17003 _ = x.Args[2] 17004 v.reset(OpARM64MOVDreg) 17005 v.AddArg(x) 17006 return true 17007 } 17008 // match: (MOVWreg x:(MOVWloadidx 
_ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWreg_10 applies the second batch of rewrite
// rules for OpARM64MOVWreg (indexed loads, nested extensions, constants,
// and shift folding). Rules are tried in order; the first match returns
// true. Generated from gen/ARM64.rules — fix the rules file, not this one.
func rewriteValueARM64_OpARM64MOVWreg_10(v *Value) bool {
	// match: (MOVWreg x:(MOVHloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUloadidx2 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHUloadidx2 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWloadidx4 _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWloadidx4 {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// NOTE(review): this case is an exact duplicate of the MOVHreg case
	// directly above and can never fire — the identical match before it
	// already returns true for any MOVHreg argument. Presumably the rule
	// in gen/ARM64.rules was meant to be (MOVWreg x:(MOVHUreg _)), which
	// is otherwise absent from this absorption list — confirm against
	// gen/ARM64.rules and fix the generator input, not this generated file.
	// match: (MOVWreg x:(MOVHreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVHreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVWreg {
			break
		}
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// Constant-fold: sign-extend the low 32 bits at compile time.
	// match: (MOVWreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int32(c))
		return true
	}
	// Fold shift-then-sign-extend into a single signed bitfield insert.
	// match: (MOVWreg (SLLconst [lc] x))
	// cond: lc < 32
	// result: (SBFIZ [arm64BFAuxInt(lc, 32-lc)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < 32) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(lc, 32-lc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64MOVWstore_0 applies the first batch of rewrite
// rules for OpARM64MOVWstore (offset folding, index forms, zero stores,
// extension elision, and store pairing). Reports whether v was rewritten.
// Generated from gen/ARM64.rules — fix the rules file, not this one.
func rewriteValueARM64_OpARM64MOVWstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// Storing a value moved out of an FP register: store from FP directly.
	// match: (MOVWstore [off] {sym} ptr (FMOVSfpgp val) mem)
	// cond:
	// result: (FMOVSstore [off] {sym} ptr val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVSfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64FMOVSstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVWstore [off1+off2]
{sym} ptr val mem) 17186 for { 17187 off1 := v.AuxInt 17188 sym := v.Aux 17189 _ = v.Args[2] 17190 v_0 := v.Args[0] 17191 if v_0.Op != OpARM64ADDconst { 17192 break 17193 } 17194 off2 := v_0.AuxInt 17195 ptr := v_0.Args[0] 17196 val := v.Args[1] 17197 mem := v.Args[2] 17198 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 17199 break 17200 } 17201 v.reset(OpARM64MOVWstore) 17202 v.AuxInt = off1 + off2 17203 v.Aux = sym 17204 v.AddArg(ptr) 17205 v.AddArg(val) 17206 v.AddArg(mem) 17207 return true 17208 } 17209 // match: (MOVWstore [off] {sym} (ADD ptr idx) val mem) 17210 // cond: off == 0 && sym == nil 17211 // result: (MOVWstoreidx ptr idx val mem) 17212 for { 17213 off := v.AuxInt 17214 sym := v.Aux 17215 _ = v.Args[2] 17216 v_0 := v.Args[0] 17217 if v_0.Op != OpARM64ADD { 17218 break 17219 } 17220 _ = v_0.Args[1] 17221 ptr := v_0.Args[0] 17222 idx := v_0.Args[1] 17223 val := v.Args[1] 17224 mem := v.Args[2] 17225 if !(off == 0 && sym == nil) { 17226 break 17227 } 17228 v.reset(OpARM64MOVWstoreidx) 17229 v.AddArg(ptr) 17230 v.AddArg(idx) 17231 v.AddArg(val) 17232 v.AddArg(mem) 17233 return true 17234 } 17235 // match: (MOVWstore [off] {sym} (ADDshiftLL [2] ptr idx) val mem) 17236 // cond: off == 0 && sym == nil 17237 // result: (MOVWstoreidx4 ptr idx val mem) 17238 for { 17239 off := v.AuxInt 17240 sym := v.Aux 17241 _ = v.Args[2] 17242 v_0 := v.Args[0] 17243 if v_0.Op != OpARM64ADDshiftLL { 17244 break 17245 } 17246 if v_0.AuxInt != 2 { 17247 break 17248 } 17249 _ = v_0.Args[1] 17250 ptr := v_0.Args[0] 17251 idx := v_0.Args[1] 17252 val := v.Args[1] 17253 mem := v.Args[2] 17254 if !(off == 0 && sym == nil) { 17255 break 17256 } 17257 v.reset(OpARM64MOVWstoreidx4) 17258 v.AddArg(ptr) 17259 v.AddArg(idx) 17260 v.AddArg(val) 17261 v.AddArg(mem) 17262 return true 17263 } 17264 // match: (MOVWstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem) 17265 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || 
!config.ctxt.Flag_shared) 17266 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 17267 for { 17268 off1 := v.AuxInt 17269 sym1 := v.Aux 17270 _ = v.Args[2] 17271 v_0 := v.Args[0] 17272 if v_0.Op != OpARM64MOVDaddr { 17273 break 17274 } 17275 off2 := v_0.AuxInt 17276 sym2 := v_0.Aux 17277 ptr := v_0.Args[0] 17278 val := v.Args[1] 17279 mem := v.Args[2] 17280 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 17281 break 17282 } 17283 v.reset(OpARM64MOVWstore) 17284 v.AuxInt = off1 + off2 17285 v.Aux = mergeSym(sym1, sym2) 17286 v.AddArg(ptr) 17287 v.AddArg(val) 17288 v.AddArg(mem) 17289 return true 17290 } 17291 // match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem) 17292 // cond: 17293 // result: (MOVWstorezero [off] {sym} ptr mem) 17294 for { 17295 off := v.AuxInt 17296 sym := v.Aux 17297 _ = v.Args[2] 17298 ptr := v.Args[0] 17299 v_1 := v.Args[1] 17300 if v_1.Op != OpARM64MOVDconst { 17301 break 17302 } 17303 if v_1.AuxInt != 0 { 17304 break 17305 } 17306 mem := v.Args[2] 17307 v.reset(OpARM64MOVWstorezero) 17308 v.AuxInt = off 17309 v.Aux = sym 17310 v.AddArg(ptr) 17311 v.AddArg(mem) 17312 return true 17313 } 17314 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 17315 // cond: 17316 // result: (MOVWstore [off] {sym} ptr x mem) 17317 for { 17318 off := v.AuxInt 17319 sym := v.Aux 17320 _ = v.Args[2] 17321 ptr := v.Args[0] 17322 v_1 := v.Args[1] 17323 if v_1.Op != OpARM64MOVWreg { 17324 break 17325 } 17326 x := v_1.Args[0] 17327 mem := v.Args[2] 17328 v.reset(OpARM64MOVWstore) 17329 v.AuxInt = off 17330 v.Aux = sym 17331 v.AddArg(ptr) 17332 v.AddArg(x) 17333 v.AddArg(mem) 17334 return true 17335 } 17336 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 17337 // cond: 17338 // result: (MOVWstore [off] {sym} ptr x mem) 17339 for { 17340 off := v.AuxInt 17341 sym := v.Aux 17342 _ = v.Args[2] 17343 ptr := v.Args[0] 17344 v_1 := v.Args[1] 17345 if v_1.Op != OpARM64MOVWUreg { 17346 
break 17347 } 17348 x := v_1.Args[0] 17349 mem := v.Args[2] 17350 v.reset(OpARM64MOVWstore) 17351 v.AuxInt = off 17352 v.Aux = sym 17353 v.AddArg(ptr) 17354 v.AddArg(x) 17355 v.AddArg(mem) 17356 return true 17357 } 17358 // match: (MOVWstore [i] {s} ptr0 (SRLconst [32] w) x:(MOVWstore [i-4] {s} ptr1 w mem)) 17359 // cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 17360 // result: (MOVDstore [i-4] {s} ptr0 w mem) 17361 for { 17362 i := v.AuxInt 17363 s := v.Aux 17364 _ = v.Args[2] 17365 ptr0 := v.Args[0] 17366 v_1 := v.Args[1] 17367 if v_1.Op != OpARM64SRLconst { 17368 break 17369 } 17370 if v_1.AuxInt != 32 { 17371 break 17372 } 17373 w := v_1.Args[0] 17374 x := v.Args[2] 17375 if x.Op != OpARM64MOVWstore { 17376 break 17377 } 17378 if x.AuxInt != i-4 { 17379 break 17380 } 17381 if x.Aux != s { 17382 break 17383 } 17384 _ = x.Args[2] 17385 ptr1 := x.Args[0] 17386 if w != x.Args[1] { 17387 break 17388 } 17389 mem := x.Args[2] 17390 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 17391 break 17392 } 17393 v.reset(OpARM64MOVDstore) 17394 v.AuxInt = i - 4 17395 v.Aux = s 17396 v.AddArg(ptr0) 17397 v.AddArg(w) 17398 v.AddArg(mem) 17399 return true 17400 } 17401 // match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx ptr1 idx1 w mem)) 17402 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 17403 // result: (MOVDstoreidx ptr1 idx1 w mem) 17404 for { 17405 if v.AuxInt != 4 { 17406 break 17407 } 17408 s := v.Aux 17409 _ = v.Args[2] 17410 v_0 := v.Args[0] 17411 if v_0.Op != OpARM64ADD { 17412 break 17413 } 17414 _ = v_0.Args[1] 17415 ptr0 := v_0.Args[0] 17416 idx0 := v_0.Args[1] 17417 v_1 := v.Args[1] 17418 if v_1.Op != OpARM64SRLconst { 17419 break 17420 } 17421 if v_1.AuxInt != 32 { 17422 break 17423 } 17424 w := v_1.Args[0] 17425 x := v.Args[2] 17426 if x.Op != OpARM64MOVWstoreidx { 17427 break 17428 } 17429 _ = x.Args[3] 17430 ptr1 
:= x.Args[0] 17431 idx1 := x.Args[1] 17432 if w != x.Args[2] { 17433 break 17434 } 17435 mem := x.Args[3] 17436 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 17437 break 17438 } 17439 v.reset(OpARM64MOVDstoreidx) 17440 v.AddArg(ptr1) 17441 v.AddArg(idx1) 17442 v.AddArg(w) 17443 v.AddArg(mem) 17444 return true 17445 } 17446 return false 17447 } 17448 func rewriteValueARM64_OpARM64MOVWstore_10(v *Value) bool { 17449 b := v.Block 17450 _ = b 17451 // match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [32] w) x:(MOVWstoreidx4 ptr1 idx1 w mem)) 17452 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 17453 // result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w mem) 17454 for { 17455 if v.AuxInt != 4 { 17456 break 17457 } 17458 s := v.Aux 17459 _ = v.Args[2] 17460 v_0 := v.Args[0] 17461 if v_0.Op != OpARM64ADDshiftLL { 17462 break 17463 } 17464 if v_0.AuxInt != 2 { 17465 break 17466 } 17467 _ = v_0.Args[1] 17468 ptr0 := v_0.Args[0] 17469 idx0 := v_0.Args[1] 17470 v_1 := v.Args[1] 17471 if v_1.Op != OpARM64SRLconst { 17472 break 17473 } 17474 if v_1.AuxInt != 32 { 17475 break 17476 } 17477 w := v_1.Args[0] 17478 x := v.Args[2] 17479 if x.Op != OpARM64MOVWstoreidx4 { 17480 break 17481 } 17482 _ = x.Args[3] 17483 ptr1 := x.Args[0] 17484 idx1 := x.Args[1] 17485 if w != x.Args[2] { 17486 break 17487 } 17488 mem := x.Args[3] 17489 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 17490 break 17491 } 17492 v.reset(OpARM64MOVDstoreidx) 17493 v.AddArg(ptr1) 17494 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 17495 v0.AuxInt = 2 17496 v0.AddArg(idx1) 17497 v.AddArg(v0) 17498 v.AddArg(w) 17499 v.AddArg(mem) 17500 return true 17501 } 17502 // match: (MOVWstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVWstore [i-4] {s} ptr1 w0:(SRLconst [j-32] w) mem)) 17503 // cond: 
x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x) 17504 // result: (MOVDstore [i-4] {s} ptr0 w0 mem) 17505 for { 17506 i := v.AuxInt 17507 s := v.Aux 17508 _ = v.Args[2] 17509 ptr0 := v.Args[0] 17510 v_1 := v.Args[1] 17511 if v_1.Op != OpARM64SRLconst { 17512 break 17513 } 17514 j := v_1.AuxInt 17515 w := v_1.Args[0] 17516 x := v.Args[2] 17517 if x.Op != OpARM64MOVWstore { 17518 break 17519 } 17520 if x.AuxInt != i-4 { 17521 break 17522 } 17523 if x.Aux != s { 17524 break 17525 } 17526 _ = x.Args[2] 17527 ptr1 := x.Args[0] 17528 w0 := x.Args[1] 17529 if w0.Op != OpARM64SRLconst { 17530 break 17531 } 17532 if w0.AuxInt != j-32 { 17533 break 17534 } 17535 if w != w0.Args[0] { 17536 break 17537 } 17538 mem := x.Args[2] 17539 if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) { 17540 break 17541 } 17542 v.reset(OpARM64MOVDstore) 17543 v.AuxInt = i - 4 17544 v.Aux = s 17545 v.AddArg(ptr0) 17546 v.AddArg(w0) 17547 v.AddArg(mem) 17548 return true 17549 } 17550 // match: (MOVWstore [4] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx ptr1 idx1 w0:(SRLconst [j-32] w) mem)) 17551 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 17552 // result: (MOVDstoreidx ptr1 idx1 w0 mem) 17553 for { 17554 if v.AuxInt != 4 { 17555 break 17556 } 17557 s := v.Aux 17558 _ = v.Args[2] 17559 v_0 := v.Args[0] 17560 if v_0.Op != OpARM64ADD { 17561 break 17562 } 17563 _ = v_0.Args[1] 17564 ptr0 := v_0.Args[0] 17565 idx0 := v_0.Args[1] 17566 v_1 := v.Args[1] 17567 if v_1.Op != OpARM64SRLconst { 17568 break 17569 } 17570 j := v_1.AuxInt 17571 w := v_1.Args[0] 17572 x := v.Args[2] 17573 if x.Op != OpARM64MOVWstoreidx { 17574 break 17575 } 17576 _ = x.Args[3] 17577 ptr1 := x.Args[0] 17578 idx1 := x.Args[1] 17579 w0 := x.Args[2] 17580 if w0.Op != OpARM64SRLconst { 17581 break 17582 } 17583 if w0.AuxInt != j-32 { 17584 break 17585 } 17586 if w != w0.Args[0] { 17587 break 17588 } 17589 mem := 
x.Args[3] 17590 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 17591 break 17592 } 17593 v.reset(OpARM64MOVDstoreidx) 17594 v.AddArg(ptr1) 17595 v.AddArg(idx1) 17596 v.AddArg(w0) 17597 v.AddArg(mem) 17598 return true 17599 } 17600 // match: (MOVWstore [4] {s} (ADDshiftLL [2] ptr0 idx0) (SRLconst [j] w) x:(MOVWstoreidx4 ptr1 idx1 w0:(SRLconst [j-32] w) mem)) 17601 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 17602 // result: (MOVDstoreidx ptr1 (SLLconst <idx1.Type> [2] idx1) w0 mem) 17603 for { 17604 if v.AuxInt != 4 { 17605 break 17606 } 17607 s := v.Aux 17608 _ = v.Args[2] 17609 v_0 := v.Args[0] 17610 if v_0.Op != OpARM64ADDshiftLL { 17611 break 17612 } 17613 if v_0.AuxInt != 2 { 17614 break 17615 } 17616 _ = v_0.Args[1] 17617 ptr0 := v_0.Args[0] 17618 idx0 := v_0.Args[1] 17619 v_1 := v.Args[1] 17620 if v_1.Op != OpARM64SRLconst { 17621 break 17622 } 17623 j := v_1.AuxInt 17624 w := v_1.Args[0] 17625 x := v.Args[2] 17626 if x.Op != OpARM64MOVWstoreidx4 { 17627 break 17628 } 17629 _ = x.Args[3] 17630 ptr1 := x.Args[0] 17631 idx1 := x.Args[1] 17632 w0 := x.Args[2] 17633 if w0.Op != OpARM64SRLconst { 17634 break 17635 } 17636 if w0.AuxInt != j-32 { 17637 break 17638 } 17639 if w != w0.Args[0] { 17640 break 17641 } 17642 mem := x.Args[3] 17643 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 17644 break 17645 } 17646 v.reset(OpARM64MOVDstoreidx) 17647 v.AddArg(ptr1) 17648 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 17649 v0.AuxInt = 2 17650 v0.AddArg(idx1) 17651 v.AddArg(v0) 17652 v.AddArg(w0) 17653 v.AddArg(mem) 17654 return true 17655 } 17656 return false 17657 } 17658 func rewriteValueARM64_OpARM64MOVWstoreidx_0(v *Value) bool { 17659 // match: (MOVWstoreidx ptr (MOVDconst [c]) val mem) 17660 // cond: 17661 // result: (MOVWstore [c] ptr val mem) 17662 for { 
17663 _ = v.Args[3] 17664 ptr := v.Args[0] 17665 v_1 := v.Args[1] 17666 if v_1.Op != OpARM64MOVDconst { 17667 break 17668 } 17669 c := v_1.AuxInt 17670 val := v.Args[2] 17671 mem := v.Args[3] 17672 v.reset(OpARM64MOVWstore) 17673 v.AuxInt = c 17674 v.AddArg(ptr) 17675 v.AddArg(val) 17676 v.AddArg(mem) 17677 return true 17678 } 17679 // match: (MOVWstoreidx (MOVDconst [c]) idx val mem) 17680 // cond: 17681 // result: (MOVWstore [c] idx val mem) 17682 for { 17683 _ = v.Args[3] 17684 v_0 := v.Args[0] 17685 if v_0.Op != OpARM64MOVDconst { 17686 break 17687 } 17688 c := v_0.AuxInt 17689 idx := v.Args[1] 17690 val := v.Args[2] 17691 mem := v.Args[3] 17692 v.reset(OpARM64MOVWstore) 17693 v.AuxInt = c 17694 v.AddArg(idx) 17695 v.AddArg(val) 17696 v.AddArg(mem) 17697 return true 17698 } 17699 // match: (MOVWstoreidx ptr (SLLconst [2] idx) val mem) 17700 // cond: 17701 // result: (MOVWstoreidx4 ptr idx val mem) 17702 for { 17703 _ = v.Args[3] 17704 ptr := v.Args[0] 17705 v_1 := v.Args[1] 17706 if v_1.Op != OpARM64SLLconst { 17707 break 17708 } 17709 if v_1.AuxInt != 2 { 17710 break 17711 } 17712 idx := v_1.Args[0] 17713 val := v.Args[2] 17714 mem := v.Args[3] 17715 v.reset(OpARM64MOVWstoreidx4) 17716 v.AddArg(ptr) 17717 v.AddArg(idx) 17718 v.AddArg(val) 17719 v.AddArg(mem) 17720 return true 17721 } 17722 // match: (MOVWstoreidx (SLLconst [2] idx) ptr val mem) 17723 // cond: 17724 // result: (MOVWstoreidx4 ptr idx val mem) 17725 for { 17726 _ = v.Args[3] 17727 v_0 := v.Args[0] 17728 if v_0.Op != OpARM64SLLconst { 17729 break 17730 } 17731 if v_0.AuxInt != 2 { 17732 break 17733 } 17734 idx := v_0.Args[0] 17735 ptr := v.Args[1] 17736 val := v.Args[2] 17737 mem := v.Args[3] 17738 v.reset(OpARM64MOVWstoreidx4) 17739 v.AddArg(ptr) 17740 v.AddArg(idx) 17741 v.AddArg(val) 17742 v.AddArg(mem) 17743 return true 17744 } 17745 // match: (MOVWstoreidx ptr idx (MOVDconst [0]) mem) 17746 // cond: 17747 // result: (MOVWstorezeroidx ptr idx mem) 17748 for { 17749 _ = v.Args[3] 17750 ptr := 
v.Args[0] 17751 idx := v.Args[1] 17752 v_2 := v.Args[2] 17753 if v_2.Op != OpARM64MOVDconst { 17754 break 17755 } 17756 if v_2.AuxInt != 0 { 17757 break 17758 } 17759 mem := v.Args[3] 17760 v.reset(OpARM64MOVWstorezeroidx) 17761 v.AddArg(ptr) 17762 v.AddArg(idx) 17763 v.AddArg(mem) 17764 return true 17765 } 17766 // match: (MOVWstoreidx ptr idx (MOVWreg x) mem) 17767 // cond: 17768 // result: (MOVWstoreidx ptr idx x mem) 17769 for { 17770 _ = v.Args[3] 17771 ptr := v.Args[0] 17772 idx := v.Args[1] 17773 v_2 := v.Args[2] 17774 if v_2.Op != OpARM64MOVWreg { 17775 break 17776 } 17777 x := v_2.Args[0] 17778 mem := v.Args[3] 17779 v.reset(OpARM64MOVWstoreidx) 17780 v.AddArg(ptr) 17781 v.AddArg(idx) 17782 v.AddArg(x) 17783 v.AddArg(mem) 17784 return true 17785 } 17786 // match: (MOVWstoreidx ptr idx (MOVWUreg x) mem) 17787 // cond: 17788 // result: (MOVWstoreidx ptr idx x mem) 17789 for { 17790 _ = v.Args[3] 17791 ptr := v.Args[0] 17792 idx := v.Args[1] 17793 v_2 := v.Args[2] 17794 if v_2.Op != OpARM64MOVWUreg { 17795 break 17796 } 17797 x := v_2.Args[0] 17798 mem := v.Args[3] 17799 v.reset(OpARM64MOVWstoreidx) 17800 v.AddArg(ptr) 17801 v.AddArg(idx) 17802 v.AddArg(x) 17803 v.AddArg(mem) 17804 return true 17805 } 17806 // match: (MOVWstoreidx ptr (ADDconst [4] idx) (SRLconst [32] w) x:(MOVWstoreidx ptr idx w mem)) 17807 // cond: x.Uses == 1 && clobber(x) 17808 // result: (MOVDstoreidx ptr idx w mem) 17809 for { 17810 _ = v.Args[3] 17811 ptr := v.Args[0] 17812 v_1 := v.Args[1] 17813 if v_1.Op != OpARM64ADDconst { 17814 break 17815 } 17816 if v_1.AuxInt != 4 { 17817 break 17818 } 17819 idx := v_1.Args[0] 17820 v_2 := v.Args[2] 17821 if v_2.Op != OpARM64SRLconst { 17822 break 17823 } 17824 if v_2.AuxInt != 32 { 17825 break 17826 } 17827 w := v_2.Args[0] 17828 x := v.Args[3] 17829 if x.Op != OpARM64MOVWstoreidx { 17830 break 17831 } 17832 _ = x.Args[3] 17833 if ptr != x.Args[0] { 17834 break 17835 } 17836 if idx != x.Args[1] { 17837 break 17838 } 17839 if w != x.Args[2] { 
17840 break 17841 } 17842 mem := x.Args[3] 17843 if !(x.Uses == 1 && clobber(x)) { 17844 break 17845 } 17846 v.reset(OpARM64MOVDstoreidx) 17847 v.AddArg(ptr) 17848 v.AddArg(idx) 17849 v.AddArg(w) 17850 v.AddArg(mem) 17851 return true 17852 } 17853 return false 17854 } 17855 func rewriteValueARM64_OpARM64MOVWstoreidx4_0(v *Value) bool { 17856 // match: (MOVWstoreidx4 ptr (MOVDconst [c]) val mem) 17857 // cond: 17858 // result: (MOVWstore [c<<2] ptr val mem) 17859 for { 17860 _ = v.Args[3] 17861 ptr := v.Args[0] 17862 v_1 := v.Args[1] 17863 if v_1.Op != OpARM64MOVDconst { 17864 break 17865 } 17866 c := v_1.AuxInt 17867 val := v.Args[2] 17868 mem := v.Args[3] 17869 v.reset(OpARM64MOVWstore) 17870 v.AuxInt = c << 2 17871 v.AddArg(ptr) 17872 v.AddArg(val) 17873 v.AddArg(mem) 17874 return true 17875 } 17876 // match: (MOVWstoreidx4 ptr idx (MOVDconst [0]) mem) 17877 // cond: 17878 // result: (MOVWstorezeroidx4 ptr idx mem) 17879 for { 17880 _ = v.Args[3] 17881 ptr := v.Args[0] 17882 idx := v.Args[1] 17883 v_2 := v.Args[2] 17884 if v_2.Op != OpARM64MOVDconst { 17885 break 17886 } 17887 if v_2.AuxInt != 0 { 17888 break 17889 } 17890 mem := v.Args[3] 17891 v.reset(OpARM64MOVWstorezeroidx4) 17892 v.AddArg(ptr) 17893 v.AddArg(idx) 17894 v.AddArg(mem) 17895 return true 17896 } 17897 // match: (MOVWstoreidx4 ptr idx (MOVWreg x) mem) 17898 // cond: 17899 // result: (MOVWstoreidx4 ptr idx x mem) 17900 for { 17901 _ = v.Args[3] 17902 ptr := v.Args[0] 17903 idx := v.Args[1] 17904 v_2 := v.Args[2] 17905 if v_2.Op != OpARM64MOVWreg { 17906 break 17907 } 17908 x := v_2.Args[0] 17909 mem := v.Args[3] 17910 v.reset(OpARM64MOVWstoreidx4) 17911 v.AddArg(ptr) 17912 v.AddArg(idx) 17913 v.AddArg(x) 17914 v.AddArg(mem) 17915 return true 17916 } 17917 // match: (MOVWstoreidx4 ptr idx (MOVWUreg x) mem) 17918 // cond: 17919 // result: (MOVWstoreidx4 ptr idx x mem) 17920 for { 17921 _ = v.Args[3] 17922 ptr := v.Args[0] 17923 idx := v.Args[1] 17924 v_2 := v.Args[2] 17925 if v_2.Op != 
OpARM64MOVWUreg { 17926 break 17927 } 17928 x := v_2.Args[0] 17929 mem := v.Args[3] 17930 v.reset(OpARM64MOVWstoreidx4) 17931 v.AddArg(ptr) 17932 v.AddArg(idx) 17933 v.AddArg(x) 17934 v.AddArg(mem) 17935 return true 17936 } 17937 return false 17938 } 17939 func rewriteValueARM64_OpARM64MOVWstorezero_0(v *Value) bool { 17940 b := v.Block 17941 _ = b 17942 config := b.Func.Config 17943 _ = config 17944 // match: (MOVWstorezero [off1] {sym} (ADDconst [off2] ptr) mem) 17945 // cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 17946 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 17947 for { 17948 off1 := v.AuxInt 17949 sym := v.Aux 17950 _ = v.Args[1] 17951 v_0 := v.Args[0] 17952 if v_0.Op != OpARM64ADDconst { 17953 break 17954 } 17955 off2 := v_0.AuxInt 17956 ptr := v_0.Args[0] 17957 mem := v.Args[1] 17958 if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 17959 break 17960 } 17961 v.reset(OpARM64MOVWstorezero) 17962 v.AuxInt = off1 + off2 17963 v.Aux = sym 17964 v.AddArg(ptr) 17965 v.AddArg(mem) 17966 return true 17967 } 17968 // match: (MOVWstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem) 17969 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared) 17970 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 17971 for { 17972 off1 := v.AuxInt 17973 sym1 := v.Aux 17974 _ = v.Args[1] 17975 v_0 := v.Args[0] 17976 if v_0.Op != OpARM64MOVDaddr { 17977 break 17978 } 17979 off2 := v_0.AuxInt 17980 sym2 := v_0.Aux 17981 ptr := v_0.Args[0] 17982 mem := v.Args[1] 17983 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) { 17984 break 17985 } 17986 v.reset(OpARM64MOVWstorezero) 17987 v.AuxInt = off1 + off2 17988 v.Aux = mergeSym(sym1, sym2) 17989 v.AddArg(ptr) 17990 v.AddArg(mem) 17991 return true 17992 } 17993 // match: (MOVWstorezero [off] {sym} (ADD ptr idx) mem) 17994 // cond: off == 0 && sym == nil 17995 
// result: (MOVWstorezeroidx ptr idx mem) 17996 for { 17997 off := v.AuxInt 17998 sym := v.Aux 17999 _ = v.Args[1] 18000 v_0 := v.Args[0] 18001 if v_0.Op != OpARM64ADD { 18002 break 18003 } 18004 _ = v_0.Args[1] 18005 ptr := v_0.Args[0] 18006 idx := v_0.Args[1] 18007 mem := v.Args[1] 18008 if !(off == 0 && sym == nil) { 18009 break 18010 } 18011 v.reset(OpARM64MOVWstorezeroidx) 18012 v.AddArg(ptr) 18013 v.AddArg(idx) 18014 v.AddArg(mem) 18015 return true 18016 } 18017 // match: (MOVWstorezero [off] {sym} (ADDshiftLL [2] ptr idx) mem) 18018 // cond: off == 0 && sym == nil 18019 // result: (MOVWstorezeroidx4 ptr idx mem) 18020 for { 18021 off := v.AuxInt 18022 sym := v.Aux 18023 _ = v.Args[1] 18024 v_0 := v.Args[0] 18025 if v_0.Op != OpARM64ADDshiftLL { 18026 break 18027 } 18028 if v_0.AuxInt != 2 { 18029 break 18030 } 18031 _ = v_0.Args[1] 18032 ptr := v_0.Args[0] 18033 idx := v_0.Args[1] 18034 mem := v.Args[1] 18035 if !(off == 0 && sym == nil) { 18036 break 18037 } 18038 v.reset(OpARM64MOVWstorezeroidx4) 18039 v.AddArg(ptr) 18040 v.AddArg(idx) 18041 v.AddArg(mem) 18042 return true 18043 } 18044 // match: (MOVWstorezero [i] {s} ptr0 x:(MOVWstorezero [j] {s} ptr1 mem)) 18045 // cond: x.Uses == 1 && areAdjacentOffsets(i,j,4) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x) 18046 // result: (MOVDstorezero [min(i,j)] {s} ptr0 mem) 18047 for { 18048 i := v.AuxInt 18049 s := v.Aux 18050 _ = v.Args[1] 18051 ptr0 := v.Args[0] 18052 x := v.Args[1] 18053 if x.Op != OpARM64MOVWstorezero { 18054 break 18055 } 18056 j := x.AuxInt 18057 if x.Aux != s { 18058 break 18059 } 18060 _ = x.Args[1] 18061 ptr1 := x.Args[0] 18062 mem := x.Args[1] 18063 if !(x.Uses == 1 && areAdjacentOffsets(i, j, 4) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) { 18064 break 18065 } 18066 v.reset(OpARM64MOVDstorezero) 18067 v.AuxInt = min(i, j) 18068 v.Aux = s 18069 v.AddArg(ptr0) 18070 v.AddArg(mem) 18071 return true 18072 } 18073 // match: (MOVWstorezero [4] {s} (ADD ptr0 
idx0) x:(MOVWstorezeroidx ptr1 idx1 mem)) 18074 // cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x) 18075 // result: (MOVDstorezeroidx ptr1 idx1 mem) 18076 for { 18077 if v.AuxInt != 4 { 18078 break 18079 } 18080 s := v.Aux 18081 _ = v.Args[1] 18082 v_0 := v.Args[0] 18083 if v_0.Op != OpARM64ADD { 18084 break 18085 } 18086 _ = v_0.Args[1] 18087 ptr0 := v_0.Args[0] 18088 idx0 := v_0.Args[1] 18089 x := v.Args[1] 18090 if x.Op != OpARM64MOVWstorezeroidx { 18091 break 18092 } 18093 _ = x.Args[2] 18094 ptr1 := x.Args[0] 18095 idx1 := x.Args[1] 18096 mem := x.Args[2] 18097 if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) { 18098 break 18099 } 18100 v.reset(OpARM64MOVDstorezeroidx) 18101 v.AddArg(ptr1) 18102 v.AddArg(idx1) 18103 v.AddArg(mem) 18104 return true 18105 } 18106 // match: (MOVWstorezero [4] {s} (ADDshiftLL [2] ptr0 idx0) x:(MOVWstorezeroidx4 ptr1 idx1 mem)) 18107 // cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x) 18108 // result: (MOVDstorezeroidx ptr1 (SLLconst <idx1.Type> [2] idx1) mem) 18109 for { 18110 if v.AuxInt != 4 { 18111 break 18112 } 18113 s := v.Aux 18114 _ = v.Args[1] 18115 v_0 := v.Args[0] 18116 if v_0.Op != OpARM64ADDshiftLL { 18117 break 18118 } 18119 if v_0.AuxInt != 2 { 18120 break 18121 } 18122 _ = v_0.Args[1] 18123 ptr0 := v_0.Args[0] 18124 idx0 := v_0.Args[1] 18125 x := v.Args[1] 18126 if x.Op != OpARM64MOVWstorezeroidx4 { 18127 break 18128 } 18129 _ = x.Args[2] 18130 ptr1 := x.Args[0] 18131 idx1 := x.Args[1] 18132 mem := x.Args[2] 18133 if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) { 18134 break 18135 } 18136 v.reset(OpARM64MOVDstorezeroidx) 18137 v.AddArg(ptr1) 18138 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, idx1.Type) 18139 v0.AuxInt = 2 18140 
v0.AddArg(idx1) 18141 v.AddArg(v0) 18142 v.AddArg(mem) 18143 return true 18144 } 18145 return false 18146 } 18147 func rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v *Value) bool { 18148 // match: (MOVWstorezeroidx ptr (MOVDconst [c]) mem) 18149 // cond: 18150 // result: (MOVWstorezero [c] ptr mem) 18151 for { 18152 _ = v.Args[2] 18153 ptr := v.Args[0] 18154 v_1 := v.Args[1] 18155 if v_1.Op != OpARM64MOVDconst { 18156 break 18157 } 18158 c := v_1.AuxInt 18159 mem := v.Args[2] 18160 v.reset(OpARM64MOVWstorezero) 18161 v.AuxInt = c 18162 v.AddArg(ptr) 18163 v.AddArg(mem) 18164 return true 18165 } 18166 // match: (MOVWstorezeroidx (MOVDconst [c]) idx mem) 18167 // cond: 18168 // result: (MOVWstorezero [c] idx mem) 18169 for { 18170 _ = v.Args[2] 18171 v_0 := v.Args[0] 18172 if v_0.Op != OpARM64MOVDconst { 18173 break 18174 } 18175 c := v_0.AuxInt 18176 idx := v.Args[1] 18177 mem := v.Args[2] 18178 v.reset(OpARM64MOVWstorezero) 18179 v.AuxInt = c 18180 v.AddArg(idx) 18181 v.AddArg(mem) 18182 return true 18183 } 18184 // match: (MOVWstorezeroidx ptr (SLLconst [2] idx) mem) 18185 // cond: 18186 // result: (MOVWstorezeroidx4 ptr idx mem) 18187 for { 18188 _ = v.Args[2] 18189 ptr := v.Args[0] 18190 v_1 := v.Args[1] 18191 if v_1.Op != OpARM64SLLconst { 18192 break 18193 } 18194 if v_1.AuxInt != 2 { 18195 break 18196 } 18197 idx := v_1.Args[0] 18198 mem := v.Args[2] 18199 v.reset(OpARM64MOVWstorezeroidx4) 18200 v.AddArg(ptr) 18201 v.AddArg(idx) 18202 v.AddArg(mem) 18203 return true 18204 } 18205 // match: (MOVWstorezeroidx (SLLconst [2] idx) ptr mem) 18206 // cond: 18207 // result: (MOVWstorezeroidx4 ptr idx mem) 18208 for { 18209 _ = v.Args[2] 18210 v_0 := v.Args[0] 18211 if v_0.Op != OpARM64SLLconst { 18212 break 18213 } 18214 if v_0.AuxInt != 2 { 18215 break 18216 } 18217 idx := v_0.Args[0] 18218 ptr := v.Args[1] 18219 mem := v.Args[2] 18220 v.reset(OpARM64MOVWstorezeroidx4) 18221 v.AddArg(ptr) 18222 v.AddArg(idx) 18223 v.AddArg(mem) 18224 return true 18225 } 18226 // 
match: (MOVWstorezeroidx ptr (ADDconst [4] idx) x:(MOVWstorezeroidx ptr idx mem)) 18227 // cond: x.Uses == 1 && clobber(x) 18228 // result: (MOVDstorezeroidx ptr idx mem) 18229 for { 18230 _ = v.Args[2] 18231 ptr := v.Args[0] 18232 v_1 := v.Args[1] 18233 if v_1.Op != OpARM64ADDconst { 18234 break 18235 } 18236 if v_1.AuxInt != 4 { 18237 break 18238 } 18239 idx := v_1.Args[0] 18240 x := v.Args[2] 18241 if x.Op != OpARM64MOVWstorezeroidx { 18242 break 18243 } 18244 _ = x.Args[2] 18245 if ptr != x.Args[0] { 18246 break 18247 } 18248 if idx != x.Args[1] { 18249 break 18250 } 18251 mem := x.Args[2] 18252 if !(x.Uses == 1 && clobber(x)) { 18253 break 18254 } 18255 v.reset(OpARM64MOVDstorezeroidx) 18256 v.AddArg(ptr) 18257 v.AddArg(idx) 18258 v.AddArg(mem) 18259 return true 18260 } 18261 return false 18262 } 18263 func rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v *Value) bool { 18264 // match: (MOVWstorezeroidx4 ptr (MOVDconst [c]) mem) 18265 // cond: 18266 // result: (MOVWstorezero [c<<2] ptr mem) 18267 for { 18268 _ = v.Args[2] 18269 ptr := v.Args[0] 18270 v_1 := v.Args[1] 18271 if v_1.Op != OpARM64MOVDconst { 18272 break 18273 } 18274 c := v_1.AuxInt 18275 mem := v.Args[2] 18276 v.reset(OpARM64MOVWstorezero) 18277 v.AuxInt = c << 2 18278 v.AddArg(ptr) 18279 v.AddArg(mem) 18280 return true 18281 } 18282 return false 18283 } 18284 func rewriteValueARM64_OpARM64MSUB_0(v *Value) bool { 18285 b := v.Block 18286 _ = b 18287 // match: (MSUB a x (MOVDconst [-1])) 18288 // cond: 18289 // result: (ADD a x) 18290 for { 18291 _ = v.Args[2] 18292 a := v.Args[0] 18293 x := v.Args[1] 18294 v_2 := v.Args[2] 18295 if v_2.Op != OpARM64MOVDconst { 18296 break 18297 } 18298 if v_2.AuxInt != -1 { 18299 break 18300 } 18301 v.reset(OpARM64ADD) 18302 v.AddArg(a) 18303 v.AddArg(x) 18304 return true 18305 } 18306 // match: (MSUB a _ (MOVDconst [0])) 18307 // cond: 18308 // result: a 18309 for { 18310 _ = v.Args[2] 18311 a := v.Args[0] 18312 v_2 := v.Args[2] 18313 if v_2.Op != OpARM64MOVDconst 
{ 18314 break 18315 } 18316 if v_2.AuxInt != 0 { 18317 break 18318 } 18319 v.reset(OpCopy) 18320 v.Type = a.Type 18321 v.AddArg(a) 18322 return true 18323 } 18324 // match: (MSUB a x (MOVDconst [1])) 18325 // cond: 18326 // result: (SUB a x) 18327 for { 18328 _ = v.Args[2] 18329 a := v.Args[0] 18330 x := v.Args[1] 18331 v_2 := v.Args[2] 18332 if v_2.Op != OpARM64MOVDconst { 18333 break 18334 } 18335 if v_2.AuxInt != 1 { 18336 break 18337 } 18338 v.reset(OpARM64SUB) 18339 v.AddArg(a) 18340 v.AddArg(x) 18341 return true 18342 } 18343 // match: (MSUB a x (MOVDconst [c])) 18344 // cond: isPowerOfTwo(c) 18345 // result: (SUBshiftLL a x [log2(c)]) 18346 for { 18347 _ = v.Args[2] 18348 a := v.Args[0] 18349 x := v.Args[1] 18350 v_2 := v.Args[2] 18351 if v_2.Op != OpARM64MOVDconst { 18352 break 18353 } 18354 c := v_2.AuxInt 18355 if !(isPowerOfTwo(c)) { 18356 break 18357 } 18358 v.reset(OpARM64SUBshiftLL) 18359 v.AuxInt = log2(c) 18360 v.AddArg(a) 18361 v.AddArg(x) 18362 return true 18363 } 18364 // match: (MSUB a x (MOVDconst [c])) 18365 // cond: isPowerOfTwo(c-1) && c>=3 18366 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18367 for { 18368 _ = v.Args[2] 18369 a := v.Args[0] 18370 x := v.Args[1] 18371 v_2 := v.Args[2] 18372 if v_2.Op != OpARM64MOVDconst { 18373 break 18374 } 18375 c := v_2.AuxInt 18376 if !(isPowerOfTwo(c-1) && c >= 3) { 18377 break 18378 } 18379 v.reset(OpARM64SUB) 18380 v.AddArg(a) 18381 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18382 v0.AuxInt = log2(c - 1) 18383 v0.AddArg(x) 18384 v0.AddArg(x) 18385 v.AddArg(v0) 18386 return true 18387 } 18388 // match: (MSUB a x (MOVDconst [c])) 18389 // cond: isPowerOfTwo(c+1) && c>=7 18390 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18391 for { 18392 _ = v.Args[2] 18393 a := v.Args[0] 18394 x := v.Args[1] 18395 v_2 := v.Args[2] 18396 if v_2.Op != OpARM64MOVDconst { 18397 break 18398 } 18399 c := v_2.AuxInt 18400 if !(isPowerOfTwo(c+1) && c >= 7) { 18401 break 18402 } 18403 
v.reset(OpARM64ADD) 18404 v.AddArg(a) 18405 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18406 v0.AuxInt = log2(c + 1) 18407 v0.AddArg(x) 18408 v0.AddArg(x) 18409 v.AddArg(v0) 18410 return true 18411 } 18412 // match: (MSUB a x (MOVDconst [c])) 18413 // cond: c%3 == 0 && isPowerOfTwo(c/3) 18414 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18415 for { 18416 _ = v.Args[2] 18417 a := v.Args[0] 18418 x := v.Args[1] 18419 v_2 := v.Args[2] 18420 if v_2.Op != OpARM64MOVDconst { 18421 break 18422 } 18423 c := v_2.AuxInt 18424 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 18425 break 18426 } 18427 v.reset(OpARM64ADDshiftLL) 18428 v.AuxInt = log2(c / 3) 18429 v.AddArg(a) 18430 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18431 v0.AuxInt = 2 18432 v0.AddArg(x) 18433 v0.AddArg(x) 18434 v.AddArg(v0) 18435 return true 18436 } 18437 // match: (MSUB a x (MOVDconst [c])) 18438 // cond: c%5 == 0 && isPowerOfTwo(c/5) 18439 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18440 for { 18441 _ = v.Args[2] 18442 a := v.Args[0] 18443 x := v.Args[1] 18444 v_2 := v.Args[2] 18445 if v_2.Op != OpARM64MOVDconst { 18446 break 18447 } 18448 c := v_2.AuxInt 18449 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 18450 break 18451 } 18452 v.reset(OpARM64SUBshiftLL) 18453 v.AuxInt = log2(c / 5) 18454 v.AddArg(a) 18455 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18456 v0.AuxInt = 2 18457 v0.AddArg(x) 18458 v0.AddArg(x) 18459 v.AddArg(v0) 18460 return true 18461 } 18462 // match: (MSUB a x (MOVDconst [c])) 18463 // cond: c%7 == 0 && isPowerOfTwo(c/7) 18464 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18465 for { 18466 _ = v.Args[2] 18467 a := v.Args[0] 18468 x := v.Args[1] 18469 v_2 := v.Args[2] 18470 if v_2.Op != OpARM64MOVDconst { 18471 break 18472 } 18473 c := v_2.AuxInt 18474 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 18475 break 18476 } 18477 v.reset(OpARM64ADDshiftLL) 18478 v.AuxInt = log2(c / 7) 18479 v.AddArg(a) 18480 v0 := 
b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18481 v0.AuxInt = 3 18482 v0.AddArg(x) 18483 v0.AddArg(x) 18484 v.AddArg(v0) 18485 return true 18486 } 18487 // match: (MSUB a x (MOVDconst [c])) 18488 // cond: c%9 == 0 && isPowerOfTwo(c/9) 18489 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18490 for { 18491 _ = v.Args[2] 18492 a := v.Args[0] 18493 x := v.Args[1] 18494 v_2 := v.Args[2] 18495 if v_2.Op != OpARM64MOVDconst { 18496 break 18497 } 18498 c := v_2.AuxInt 18499 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 18500 break 18501 } 18502 v.reset(OpARM64SUBshiftLL) 18503 v.AuxInt = log2(c / 9) 18504 v.AddArg(a) 18505 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18506 v0.AuxInt = 3 18507 v0.AddArg(x) 18508 v0.AddArg(x) 18509 v.AddArg(v0) 18510 return true 18511 } 18512 return false 18513 } 18514 func rewriteValueARM64_OpARM64MSUB_10(v *Value) bool { 18515 b := v.Block 18516 _ = b 18517 // match: (MSUB a (MOVDconst [-1]) x) 18518 // cond: 18519 // result: (ADD a x) 18520 for { 18521 _ = v.Args[2] 18522 a := v.Args[0] 18523 v_1 := v.Args[1] 18524 if v_1.Op != OpARM64MOVDconst { 18525 break 18526 } 18527 if v_1.AuxInt != -1 { 18528 break 18529 } 18530 x := v.Args[2] 18531 v.reset(OpARM64ADD) 18532 v.AddArg(a) 18533 v.AddArg(x) 18534 return true 18535 } 18536 // match: (MSUB a (MOVDconst [0]) _) 18537 // cond: 18538 // result: a 18539 for { 18540 _ = v.Args[2] 18541 a := v.Args[0] 18542 v_1 := v.Args[1] 18543 if v_1.Op != OpARM64MOVDconst { 18544 break 18545 } 18546 if v_1.AuxInt != 0 { 18547 break 18548 } 18549 v.reset(OpCopy) 18550 v.Type = a.Type 18551 v.AddArg(a) 18552 return true 18553 } 18554 // match: (MSUB a (MOVDconst [1]) x) 18555 // cond: 18556 // result: (SUB a x) 18557 for { 18558 _ = v.Args[2] 18559 a := v.Args[0] 18560 v_1 := v.Args[1] 18561 if v_1.Op != OpARM64MOVDconst { 18562 break 18563 } 18564 if v_1.AuxInt != 1 { 18565 break 18566 } 18567 x := v.Args[2] 18568 v.reset(OpARM64SUB) 18569 v.AddArg(a) 18570 v.AddArg(x) 18571 return 
true 18572 } 18573 // match: (MSUB a (MOVDconst [c]) x) 18574 // cond: isPowerOfTwo(c) 18575 // result: (SUBshiftLL a x [log2(c)]) 18576 for { 18577 _ = v.Args[2] 18578 a := v.Args[0] 18579 v_1 := v.Args[1] 18580 if v_1.Op != OpARM64MOVDconst { 18581 break 18582 } 18583 c := v_1.AuxInt 18584 x := v.Args[2] 18585 if !(isPowerOfTwo(c)) { 18586 break 18587 } 18588 v.reset(OpARM64SUBshiftLL) 18589 v.AuxInt = log2(c) 18590 v.AddArg(a) 18591 v.AddArg(x) 18592 return true 18593 } 18594 // match: (MSUB a (MOVDconst [c]) x) 18595 // cond: isPowerOfTwo(c-1) && c>=3 18596 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18597 for { 18598 _ = v.Args[2] 18599 a := v.Args[0] 18600 v_1 := v.Args[1] 18601 if v_1.Op != OpARM64MOVDconst { 18602 break 18603 } 18604 c := v_1.AuxInt 18605 x := v.Args[2] 18606 if !(isPowerOfTwo(c-1) && c >= 3) { 18607 break 18608 } 18609 v.reset(OpARM64SUB) 18610 v.AddArg(a) 18611 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18612 v0.AuxInt = log2(c - 1) 18613 v0.AddArg(x) 18614 v0.AddArg(x) 18615 v.AddArg(v0) 18616 return true 18617 } 18618 // match: (MSUB a (MOVDconst [c]) x) 18619 // cond: isPowerOfTwo(c+1) && c>=7 18620 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18621 for { 18622 _ = v.Args[2] 18623 a := v.Args[0] 18624 v_1 := v.Args[1] 18625 if v_1.Op != OpARM64MOVDconst { 18626 break 18627 } 18628 c := v_1.AuxInt 18629 x := v.Args[2] 18630 if !(isPowerOfTwo(c+1) && c >= 7) { 18631 break 18632 } 18633 v.reset(OpARM64ADD) 18634 v.AddArg(a) 18635 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18636 v0.AuxInt = log2(c + 1) 18637 v0.AddArg(x) 18638 v0.AddArg(x) 18639 v.AddArg(v0) 18640 return true 18641 } 18642 // match: (MSUB a (MOVDconst [c]) x) 18643 // cond: c%3 == 0 && isPowerOfTwo(c/3) 18644 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18645 for { 18646 _ = v.Args[2] 18647 a := v.Args[0] 18648 v_1 := v.Args[1] 18649 if v_1.Op != OpARM64MOVDconst { 18650 break 18651 } 18652 c := v_1.AuxInt 
18653 x := v.Args[2] 18654 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 18655 break 18656 } 18657 v.reset(OpARM64ADDshiftLL) 18658 v.AuxInt = log2(c / 3) 18659 v.AddArg(a) 18660 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18661 v0.AuxInt = 2 18662 v0.AddArg(x) 18663 v0.AddArg(x) 18664 v.AddArg(v0) 18665 return true 18666 } 18667 // match: (MSUB a (MOVDconst [c]) x) 18668 // cond: c%5 == 0 && isPowerOfTwo(c/5) 18669 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18670 for { 18671 _ = v.Args[2] 18672 a := v.Args[0] 18673 v_1 := v.Args[1] 18674 if v_1.Op != OpARM64MOVDconst { 18675 break 18676 } 18677 c := v_1.AuxInt 18678 x := v.Args[2] 18679 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 18680 break 18681 } 18682 v.reset(OpARM64SUBshiftLL) 18683 v.AuxInt = log2(c / 5) 18684 v.AddArg(a) 18685 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18686 v0.AuxInt = 2 18687 v0.AddArg(x) 18688 v0.AddArg(x) 18689 v.AddArg(v0) 18690 return true 18691 } 18692 // match: (MSUB a (MOVDconst [c]) x) 18693 // cond: c%7 == 0 && isPowerOfTwo(c/7) 18694 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18695 for { 18696 _ = v.Args[2] 18697 a := v.Args[0] 18698 v_1 := v.Args[1] 18699 if v_1.Op != OpARM64MOVDconst { 18700 break 18701 } 18702 c := v_1.AuxInt 18703 x := v.Args[2] 18704 if !(c%7 == 0 && isPowerOfTwo(c/7)) { 18705 break 18706 } 18707 v.reset(OpARM64ADDshiftLL) 18708 v.AuxInt = log2(c / 7) 18709 v.AddArg(a) 18710 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18711 v0.AuxInt = 3 18712 v0.AddArg(x) 18713 v0.AddArg(x) 18714 v.AddArg(v0) 18715 return true 18716 } 18717 // match: (MSUB a (MOVDconst [c]) x) 18718 // cond: c%9 == 0 && isPowerOfTwo(c/9) 18719 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18720 for { 18721 _ = v.Args[2] 18722 a := v.Args[0] 18723 v_1 := v.Args[1] 18724 if v_1.Op != OpARM64MOVDconst { 18725 break 18726 } 18727 c := v_1.AuxInt 18728 x := v.Args[2] 18729 if !(c%9 == 0 && isPowerOfTwo(c/9)) { 
18730 break 18731 } 18732 v.reset(OpARM64SUBshiftLL) 18733 v.AuxInt = log2(c / 9) 18734 v.AddArg(a) 18735 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18736 v0.AuxInt = 3 18737 v0.AddArg(x) 18738 v0.AddArg(x) 18739 v.AddArg(v0) 18740 return true 18741 } 18742 return false 18743 } 18744 func rewriteValueARM64_OpARM64MSUB_20(v *Value) bool { 18745 b := v.Block 18746 _ = b 18747 // match: (MSUB (MOVDconst [c]) x y) 18748 // cond: 18749 // result: (ADDconst [c] (MNEG <x.Type> x y)) 18750 for { 18751 _ = v.Args[2] 18752 v_0 := v.Args[0] 18753 if v_0.Op != OpARM64MOVDconst { 18754 break 18755 } 18756 c := v_0.AuxInt 18757 x := v.Args[1] 18758 y := v.Args[2] 18759 v.reset(OpARM64ADDconst) 18760 v.AuxInt = c 18761 v0 := b.NewValue0(v.Pos, OpARM64MNEG, x.Type) 18762 v0.AddArg(x) 18763 v0.AddArg(y) 18764 v.AddArg(v0) 18765 return true 18766 } 18767 // match: (MSUB a (MOVDconst [c]) (MOVDconst [d])) 18768 // cond: 18769 // result: (SUBconst [c*d] a) 18770 for { 18771 _ = v.Args[2] 18772 a := v.Args[0] 18773 v_1 := v.Args[1] 18774 if v_1.Op != OpARM64MOVDconst { 18775 break 18776 } 18777 c := v_1.AuxInt 18778 v_2 := v.Args[2] 18779 if v_2.Op != OpARM64MOVDconst { 18780 break 18781 } 18782 d := v_2.AuxInt 18783 v.reset(OpARM64SUBconst) 18784 v.AuxInt = c * d 18785 v.AddArg(a) 18786 return true 18787 } 18788 return false 18789 } 18790 func rewriteValueARM64_OpARM64MSUBW_0(v *Value) bool { 18791 b := v.Block 18792 _ = b 18793 // match: (MSUBW a x (MOVDconst [c])) 18794 // cond: int32(c)==-1 18795 // result: (ADD a x) 18796 for { 18797 _ = v.Args[2] 18798 a := v.Args[0] 18799 x := v.Args[1] 18800 v_2 := v.Args[2] 18801 if v_2.Op != OpARM64MOVDconst { 18802 break 18803 } 18804 c := v_2.AuxInt 18805 if !(int32(c) == -1) { 18806 break 18807 } 18808 v.reset(OpARM64ADD) 18809 v.AddArg(a) 18810 v.AddArg(x) 18811 return true 18812 } 18813 // match: (MSUBW a _ (MOVDconst [c])) 18814 // cond: int32(c)==0 18815 // result: a 18816 for { 18817 _ = v.Args[2] 18818 a := v.Args[0] 18819 
v_2 := v.Args[2] 18820 if v_2.Op != OpARM64MOVDconst { 18821 break 18822 } 18823 c := v_2.AuxInt 18824 if !(int32(c) == 0) { 18825 break 18826 } 18827 v.reset(OpCopy) 18828 v.Type = a.Type 18829 v.AddArg(a) 18830 return true 18831 } 18832 // match: (MSUBW a x (MOVDconst [c])) 18833 // cond: int32(c)==1 18834 // result: (SUB a x) 18835 for { 18836 _ = v.Args[2] 18837 a := v.Args[0] 18838 x := v.Args[1] 18839 v_2 := v.Args[2] 18840 if v_2.Op != OpARM64MOVDconst { 18841 break 18842 } 18843 c := v_2.AuxInt 18844 if !(int32(c) == 1) { 18845 break 18846 } 18847 v.reset(OpARM64SUB) 18848 v.AddArg(a) 18849 v.AddArg(x) 18850 return true 18851 } 18852 // match: (MSUBW a x (MOVDconst [c])) 18853 // cond: isPowerOfTwo(c) 18854 // result: (SUBshiftLL a x [log2(c)]) 18855 for { 18856 _ = v.Args[2] 18857 a := v.Args[0] 18858 x := v.Args[1] 18859 v_2 := v.Args[2] 18860 if v_2.Op != OpARM64MOVDconst { 18861 break 18862 } 18863 c := v_2.AuxInt 18864 if !(isPowerOfTwo(c)) { 18865 break 18866 } 18867 v.reset(OpARM64SUBshiftLL) 18868 v.AuxInt = log2(c) 18869 v.AddArg(a) 18870 v.AddArg(x) 18871 return true 18872 } 18873 // match: (MSUBW a x (MOVDconst [c])) 18874 // cond: isPowerOfTwo(c-1) && int32(c)>=3 18875 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 18876 for { 18877 _ = v.Args[2] 18878 a := v.Args[0] 18879 x := v.Args[1] 18880 v_2 := v.Args[2] 18881 if v_2.Op != OpARM64MOVDconst { 18882 break 18883 } 18884 c := v_2.AuxInt 18885 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 18886 break 18887 } 18888 v.reset(OpARM64SUB) 18889 v.AddArg(a) 18890 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18891 v0.AuxInt = log2(c - 1) 18892 v0.AddArg(x) 18893 v0.AddArg(x) 18894 v.AddArg(v0) 18895 return true 18896 } 18897 // match: (MSUBW a x (MOVDconst [c])) 18898 // cond: isPowerOfTwo(c+1) && int32(c)>=7 18899 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 18900 for { 18901 _ = v.Args[2] 18902 a := v.Args[0] 18903 x := v.Args[1] 18904 v_2 := v.Args[2] 18905 if v_2.Op != 
OpARM64MOVDconst { 18906 break 18907 } 18908 c := v_2.AuxInt 18909 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 18910 break 18911 } 18912 v.reset(OpARM64ADD) 18913 v.AddArg(a) 18914 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18915 v0.AuxInt = log2(c + 1) 18916 v0.AddArg(x) 18917 v0.AddArg(x) 18918 v.AddArg(v0) 18919 return true 18920 } 18921 // match: (MSUBW a x (MOVDconst [c])) 18922 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 18923 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 18924 for { 18925 _ = v.Args[2] 18926 a := v.Args[0] 18927 x := v.Args[1] 18928 v_2 := v.Args[2] 18929 if v_2.Op != OpARM64MOVDconst { 18930 break 18931 } 18932 c := v_2.AuxInt 18933 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 18934 break 18935 } 18936 v.reset(OpARM64ADDshiftLL) 18937 v.AuxInt = log2(c / 3) 18938 v.AddArg(a) 18939 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18940 v0.AuxInt = 2 18941 v0.AddArg(x) 18942 v0.AddArg(x) 18943 v.AddArg(v0) 18944 return true 18945 } 18946 // match: (MSUBW a x (MOVDconst [c])) 18947 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 18948 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 18949 for { 18950 _ = v.Args[2] 18951 a := v.Args[0] 18952 x := v.Args[1] 18953 v_2 := v.Args[2] 18954 if v_2.Op != OpARM64MOVDconst { 18955 break 18956 } 18957 c := v_2.AuxInt 18958 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 18959 break 18960 } 18961 v.reset(OpARM64SUBshiftLL) 18962 v.AuxInt = log2(c / 5) 18963 v.AddArg(a) 18964 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 18965 v0.AuxInt = 2 18966 v0.AddArg(x) 18967 v0.AddArg(x) 18968 v.AddArg(v0) 18969 return true 18970 } 18971 // match: (MSUBW a x (MOVDconst [c])) 18972 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 18973 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 18974 for { 18975 _ = v.Args[2] 18976 a := v.Args[0] 18977 x := v.Args[1] 18978 v_2 := v.Args[2] 18979 if v_2.Op != 
OpARM64MOVDconst { 18980 break 18981 } 18982 c := v_2.AuxInt 18983 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 18984 break 18985 } 18986 v.reset(OpARM64ADDshiftLL) 18987 v.AuxInt = log2(c / 7) 18988 v.AddArg(a) 18989 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 18990 v0.AuxInt = 3 18991 v0.AddArg(x) 18992 v0.AddArg(x) 18993 v.AddArg(v0) 18994 return true 18995 } 18996 // match: (MSUBW a x (MOVDconst [c])) 18997 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 18998 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 18999 for { 19000 _ = v.Args[2] 19001 a := v.Args[0] 19002 x := v.Args[1] 19003 v_2 := v.Args[2] 19004 if v_2.Op != OpARM64MOVDconst { 19005 break 19006 } 19007 c := v_2.AuxInt 19008 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 19009 break 19010 } 19011 v.reset(OpARM64SUBshiftLL) 19012 v.AuxInt = log2(c / 9) 19013 v.AddArg(a) 19014 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19015 v0.AuxInt = 3 19016 v0.AddArg(x) 19017 v0.AddArg(x) 19018 v.AddArg(v0) 19019 return true 19020 } 19021 return false 19022 } 19023 func rewriteValueARM64_OpARM64MSUBW_10(v *Value) bool { 19024 b := v.Block 19025 _ = b 19026 // match: (MSUBW a (MOVDconst [c]) x) 19027 // cond: int32(c)==-1 19028 // result: (ADD a x) 19029 for { 19030 _ = v.Args[2] 19031 a := v.Args[0] 19032 v_1 := v.Args[1] 19033 if v_1.Op != OpARM64MOVDconst { 19034 break 19035 } 19036 c := v_1.AuxInt 19037 x := v.Args[2] 19038 if !(int32(c) == -1) { 19039 break 19040 } 19041 v.reset(OpARM64ADD) 19042 v.AddArg(a) 19043 v.AddArg(x) 19044 return true 19045 } 19046 // match: (MSUBW a (MOVDconst [c]) _) 19047 // cond: int32(c)==0 19048 // result: a 19049 for { 19050 _ = v.Args[2] 19051 a := v.Args[0] 19052 v_1 := v.Args[1] 19053 if v_1.Op != OpARM64MOVDconst { 19054 break 19055 } 19056 c := v_1.AuxInt 19057 if !(int32(c) == 0) { 19058 break 19059 } 19060 v.reset(OpCopy) 19061 v.Type = a.Type 19062 v.AddArg(a) 19063 return true 19064 } 19065 // match: (MSUBW a 
(MOVDconst [c]) x) 19066 // cond: int32(c)==1 19067 // result: (SUB a x) 19068 for { 19069 _ = v.Args[2] 19070 a := v.Args[0] 19071 v_1 := v.Args[1] 19072 if v_1.Op != OpARM64MOVDconst { 19073 break 19074 } 19075 c := v_1.AuxInt 19076 x := v.Args[2] 19077 if !(int32(c) == 1) { 19078 break 19079 } 19080 v.reset(OpARM64SUB) 19081 v.AddArg(a) 19082 v.AddArg(x) 19083 return true 19084 } 19085 // match: (MSUBW a (MOVDconst [c]) x) 19086 // cond: isPowerOfTwo(c) 19087 // result: (SUBshiftLL a x [log2(c)]) 19088 for { 19089 _ = v.Args[2] 19090 a := v.Args[0] 19091 v_1 := v.Args[1] 19092 if v_1.Op != OpARM64MOVDconst { 19093 break 19094 } 19095 c := v_1.AuxInt 19096 x := v.Args[2] 19097 if !(isPowerOfTwo(c)) { 19098 break 19099 } 19100 v.reset(OpARM64SUBshiftLL) 19101 v.AuxInt = log2(c) 19102 v.AddArg(a) 19103 v.AddArg(x) 19104 return true 19105 } 19106 // match: (MSUBW a (MOVDconst [c]) x) 19107 // cond: isPowerOfTwo(c-1) && int32(c)>=3 19108 // result: (SUB a (ADDshiftLL <x.Type> x x [log2(c-1)])) 19109 for { 19110 _ = v.Args[2] 19111 a := v.Args[0] 19112 v_1 := v.Args[1] 19113 if v_1.Op != OpARM64MOVDconst { 19114 break 19115 } 19116 c := v_1.AuxInt 19117 x := v.Args[2] 19118 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 19119 break 19120 } 19121 v.reset(OpARM64SUB) 19122 v.AddArg(a) 19123 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19124 v0.AuxInt = log2(c - 1) 19125 v0.AddArg(x) 19126 v0.AddArg(x) 19127 v.AddArg(v0) 19128 return true 19129 } 19130 // match: (MSUBW a (MOVDconst [c]) x) 19131 // cond: isPowerOfTwo(c+1) && int32(c)>=7 19132 // result: (ADD a (SUBshiftLL <x.Type> x x [log2(c+1)])) 19133 for { 19134 _ = v.Args[2] 19135 a := v.Args[0] 19136 v_1 := v.Args[1] 19137 if v_1.Op != OpARM64MOVDconst { 19138 break 19139 } 19140 c := v_1.AuxInt 19141 x := v.Args[2] 19142 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 19143 break 19144 } 19145 v.reset(OpARM64ADD) 19146 v.AddArg(a) 19147 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 19148 v0.AuxInt = log2(c + 
1) 19149 v0.AddArg(x) 19150 v0.AddArg(x) 19151 v.AddArg(v0) 19152 return true 19153 } 19154 // match: (MSUBW a (MOVDconst [c]) x) 19155 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 19156 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)]) 19157 for { 19158 _ = v.Args[2] 19159 a := v.Args[0] 19160 v_1 := v.Args[1] 19161 if v_1.Op != OpARM64MOVDconst { 19162 break 19163 } 19164 c := v_1.AuxInt 19165 x := v.Args[2] 19166 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 19167 break 19168 } 19169 v.reset(OpARM64ADDshiftLL) 19170 v.AuxInt = log2(c / 3) 19171 v.AddArg(a) 19172 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type) 19173 v0.AuxInt = 2 19174 v0.AddArg(x) 19175 v0.AddArg(x) 19176 v.AddArg(v0) 19177 return true 19178 } 19179 // match: (MSUBW a (MOVDconst [c]) x) 19180 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 19181 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)]) 19182 for { 19183 _ = v.Args[2] 19184 a := v.Args[0] 19185 v_1 := v.Args[1] 19186 if v_1.Op != OpARM64MOVDconst { 19187 break 19188 } 19189 c := v_1.AuxInt 19190 x := v.Args[2] 19191 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 19192 break 19193 } 19194 v.reset(OpARM64SUBshiftLL) 19195 v.AuxInt = log2(c / 5) 19196 v.AddArg(a) 19197 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19198 v0.AuxInt = 2 19199 v0.AddArg(x) 19200 v0.AddArg(x) 19201 v.AddArg(v0) 19202 return true 19203 } 19204 // match: (MSUBW a (MOVDconst [c]) x) 19205 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 19206 // result: (ADDshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)]) 19207 for { 19208 _ = v.Args[2] 19209 a := v.Args[0] 19210 v_1 := v.Args[1] 19211 if v_1.Op != OpARM64MOVDconst { 19212 break 19213 } 19214 c := v_1.AuxInt 19215 x := v.Args[2] 19216 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 19217 break 19218 } 19219 v.reset(OpARM64ADDshiftLL) 19220 v.AuxInt = log2(c / 7) 19221 v.AddArg(a) 19222 v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, 
x.Type) 19223 v0.AuxInt = 3 19224 v0.AddArg(x) 19225 v0.AddArg(x) 19226 v.AddArg(v0) 19227 return true 19228 } 19229 // match: (MSUBW a (MOVDconst [c]) x) 19230 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 19231 // result: (SUBshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)]) 19232 for { 19233 _ = v.Args[2] 19234 a := v.Args[0] 19235 v_1 := v.Args[1] 19236 if v_1.Op != OpARM64MOVDconst { 19237 break 19238 } 19239 c := v_1.AuxInt 19240 x := v.Args[2] 19241 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 19242 break 19243 } 19244 v.reset(OpARM64SUBshiftLL) 19245 v.AuxInt = log2(c / 9) 19246 v.AddArg(a) 19247 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19248 v0.AuxInt = 3 19249 v0.AddArg(x) 19250 v0.AddArg(x) 19251 v.AddArg(v0) 19252 return true 19253 } 19254 return false 19255 } 19256 func rewriteValueARM64_OpARM64MSUBW_20(v *Value) bool { 19257 b := v.Block 19258 _ = b 19259 // match: (MSUBW (MOVDconst [c]) x y) 19260 // cond: 19261 // result: (ADDconst [c] (MNEGW <x.Type> x y)) 19262 for { 19263 _ = v.Args[2] 19264 v_0 := v.Args[0] 19265 if v_0.Op != OpARM64MOVDconst { 19266 break 19267 } 19268 c := v_0.AuxInt 19269 x := v.Args[1] 19270 y := v.Args[2] 19271 v.reset(OpARM64ADDconst) 19272 v.AuxInt = c 19273 v0 := b.NewValue0(v.Pos, OpARM64MNEGW, x.Type) 19274 v0.AddArg(x) 19275 v0.AddArg(y) 19276 v.AddArg(v0) 19277 return true 19278 } 19279 // match: (MSUBW a (MOVDconst [c]) (MOVDconst [d])) 19280 // cond: 19281 // result: (SUBconst [int64(int32(c)*int32(d))] a) 19282 for { 19283 _ = v.Args[2] 19284 a := v.Args[0] 19285 v_1 := v.Args[1] 19286 if v_1.Op != OpARM64MOVDconst { 19287 break 19288 } 19289 c := v_1.AuxInt 19290 v_2 := v.Args[2] 19291 if v_2.Op != OpARM64MOVDconst { 19292 break 19293 } 19294 d := v_2.AuxInt 19295 v.reset(OpARM64SUBconst) 19296 v.AuxInt = int64(int32(c) * int32(d)) 19297 v.AddArg(a) 19298 return true 19299 } 19300 return false 19301 } 19302 func rewriteValueARM64_OpARM64MUL_0(v *Value) bool { 19303 // match: (MUL (NEG 
x) y) 19304 // cond: 19305 // result: (MNEG x y) 19306 for { 19307 _ = v.Args[1] 19308 v_0 := v.Args[0] 19309 if v_0.Op != OpARM64NEG { 19310 break 19311 } 19312 x := v_0.Args[0] 19313 y := v.Args[1] 19314 v.reset(OpARM64MNEG) 19315 v.AddArg(x) 19316 v.AddArg(y) 19317 return true 19318 } 19319 // match: (MUL y (NEG x)) 19320 // cond: 19321 // result: (MNEG x y) 19322 for { 19323 _ = v.Args[1] 19324 y := v.Args[0] 19325 v_1 := v.Args[1] 19326 if v_1.Op != OpARM64NEG { 19327 break 19328 } 19329 x := v_1.Args[0] 19330 v.reset(OpARM64MNEG) 19331 v.AddArg(x) 19332 v.AddArg(y) 19333 return true 19334 } 19335 // match: (MUL x (MOVDconst [-1])) 19336 // cond: 19337 // result: (NEG x) 19338 for { 19339 _ = v.Args[1] 19340 x := v.Args[0] 19341 v_1 := v.Args[1] 19342 if v_1.Op != OpARM64MOVDconst { 19343 break 19344 } 19345 if v_1.AuxInt != -1 { 19346 break 19347 } 19348 v.reset(OpARM64NEG) 19349 v.AddArg(x) 19350 return true 19351 } 19352 // match: (MUL (MOVDconst [-1]) x) 19353 // cond: 19354 // result: (NEG x) 19355 for { 19356 _ = v.Args[1] 19357 v_0 := v.Args[0] 19358 if v_0.Op != OpARM64MOVDconst { 19359 break 19360 } 19361 if v_0.AuxInt != -1 { 19362 break 19363 } 19364 x := v.Args[1] 19365 v.reset(OpARM64NEG) 19366 v.AddArg(x) 19367 return true 19368 } 19369 // match: (MUL _ (MOVDconst [0])) 19370 // cond: 19371 // result: (MOVDconst [0]) 19372 for { 19373 _ = v.Args[1] 19374 v_1 := v.Args[1] 19375 if v_1.Op != OpARM64MOVDconst { 19376 break 19377 } 19378 if v_1.AuxInt != 0 { 19379 break 19380 } 19381 v.reset(OpARM64MOVDconst) 19382 v.AuxInt = 0 19383 return true 19384 } 19385 // match: (MUL (MOVDconst [0]) _) 19386 // cond: 19387 // result: (MOVDconst [0]) 19388 for { 19389 _ = v.Args[1] 19390 v_0 := v.Args[0] 19391 if v_0.Op != OpARM64MOVDconst { 19392 break 19393 } 19394 if v_0.AuxInt != 0 { 19395 break 19396 } 19397 v.reset(OpARM64MOVDconst) 19398 v.AuxInt = 0 19399 return true 19400 } 19401 // match: (MUL x (MOVDconst [1])) 19402 // cond: 19403 // result: x 19404 
for { 19405 _ = v.Args[1] 19406 x := v.Args[0] 19407 v_1 := v.Args[1] 19408 if v_1.Op != OpARM64MOVDconst { 19409 break 19410 } 19411 if v_1.AuxInt != 1 { 19412 break 19413 } 19414 v.reset(OpCopy) 19415 v.Type = x.Type 19416 v.AddArg(x) 19417 return true 19418 } 19419 // match: (MUL (MOVDconst [1]) x) 19420 // cond: 19421 // result: x 19422 for { 19423 _ = v.Args[1] 19424 v_0 := v.Args[0] 19425 if v_0.Op != OpARM64MOVDconst { 19426 break 19427 } 19428 if v_0.AuxInt != 1 { 19429 break 19430 } 19431 x := v.Args[1] 19432 v.reset(OpCopy) 19433 v.Type = x.Type 19434 v.AddArg(x) 19435 return true 19436 } 19437 // match: (MUL x (MOVDconst [c])) 19438 // cond: isPowerOfTwo(c) 19439 // result: (SLLconst [log2(c)] x) 19440 for { 19441 _ = v.Args[1] 19442 x := v.Args[0] 19443 v_1 := v.Args[1] 19444 if v_1.Op != OpARM64MOVDconst { 19445 break 19446 } 19447 c := v_1.AuxInt 19448 if !(isPowerOfTwo(c)) { 19449 break 19450 } 19451 v.reset(OpARM64SLLconst) 19452 v.AuxInt = log2(c) 19453 v.AddArg(x) 19454 return true 19455 } 19456 // match: (MUL (MOVDconst [c]) x) 19457 // cond: isPowerOfTwo(c) 19458 // result: (SLLconst [log2(c)] x) 19459 for { 19460 _ = v.Args[1] 19461 v_0 := v.Args[0] 19462 if v_0.Op != OpARM64MOVDconst { 19463 break 19464 } 19465 c := v_0.AuxInt 19466 x := v.Args[1] 19467 if !(isPowerOfTwo(c)) { 19468 break 19469 } 19470 v.reset(OpARM64SLLconst) 19471 v.AuxInt = log2(c) 19472 v.AddArg(x) 19473 return true 19474 } 19475 return false 19476 } 19477 func rewriteValueARM64_OpARM64MUL_10(v *Value) bool { 19478 b := v.Block 19479 _ = b 19480 // match: (MUL x (MOVDconst [c])) 19481 // cond: isPowerOfTwo(c-1) && c >= 3 19482 // result: (ADDshiftLL x x [log2(c-1)]) 19483 for { 19484 _ = v.Args[1] 19485 x := v.Args[0] 19486 v_1 := v.Args[1] 19487 if v_1.Op != OpARM64MOVDconst { 19488 break 19489 } 19490 c := v_1.AuxInt 19491 if !(isPowerOfTwo(c-1) && c >= 3) { 19492 break 19493 } 19494 v.reset(OpARM64ADDshiftLL) 19495 v.AuxInt = log2(c - 1) 19496 v.AddArg(x) 19497 
v.AddArg(x) 19498 return true 19499 } 19500 // match: (MUL (MOVDconst [c]) x) 19501 // cond: isPowerOfTwo(c-1) && c >= 3 19502 // result: (ADDshiftLL x x [log2(c-1)]) 19503 for { 19504 _ = v.Args[1] 19505 v_0 := v.Args[0] 19506 if v_0.Op != OpARM64MOVDconst { 19507 break 19508 } 19509 c := v_0.AuxInt 19510 x := v.Args[1] 19511 if !(isPowerOfTwo(c-1) && c >= 3) { 19512 break 19513 } 19514 v.reset(OpARM64ADDshiftLL) 19515 v.AuxInt = log2(c - 1) 19516 v.AddArg(x) 19517 v.AddArg(x) 19518 return true 19519 } 19520 // match: (MUL x (MOVDconst [c])) 19521 // cond: isPowerOfTwo(c+1) && c >= 7 19522 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 19523 for { 19524 _ = v.Args[1] 19525 x := v.Args[0] 19526 v_1 := v.Args[1] 19527 if v_1.Op != OpARM64MOVDconst { 19528 break 19529 } 19530 c := v_1.AuxInt 19531 if !(isPowerOfTwo(c+1) && c >= 7) { 19532 break 19533 } 19534 v.reset(OpARM64ADDshiftLL) 19535 v.AuxInt = log2(c + 1) 19536 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19537 v0.AddArg(x) 19538 v.AddArg(v0) 19539 v.AddArg(x) 19540 return true 19541 } 19542 // match: (MUL (MOVDconst [c]) x) 19543 // cond: isPowerOfTwo(c+1) && c >= 7 19544 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 19545 for { 19546 _ = v.Args[1] 19547 v_0 := v.Args[0] 19548 if v_0.Op != OpARM64MOVDconst { 19549 break 19550 } 19551 c := v_0.AuxInt 19552 x := v.Args[1] 19553 if !(isPowerOfTwo(c+1) && c >= 7) { 19554 break 19555 } 19556 v.reset(OpARM64ADDshiftLL) 19557 v.AuxInt = log2(c + 1) 19558 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 19559 v0.AddArg(x) 19560 v.AddArg(v0) 19561 v.AddArg(x) 19562 return true 19563 } 19564 // match: (MUL x (MOVDconst [c])) 19565 // cond: c%3 == 0 && isPowerOfTwo(c/3) 19566 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 19567 for { 19568 _ = v.Args[1] 19569 x := v.Args[0] 19570 v_1 := v.Args[1] 19571 if v_1.Op != OpARM64MOVDconst { 19572 break 19573 } 19574 c := v_1.AuxInt 19575 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 19576 break 19577 } 
19578 v.reset(OpARM64SLLconst) 19579 v.AuxInt = log2(c / 3) 19580 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19581 v0.AuxInt = 1 19582 v0.AddArg(x) 19583 v0.AddArg(x) 19584 v.AddArg(v0) 19585 return true 19586 } 19587 // match: (MUL (MOVDconst [c]) x) 19588 // cond: c%3 == 0 && isPowerOfTwo(c/3) 19589 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 19590 for { 19591 _ = v.Args[1] 19592 v_0 := v.Args[0] 19593 if v_0.Op != OpARM64MOVDconst { 19594 break 19595 } 19596 c := v_0.AuxInt 19597 x := v.Args[1] 19598 if !(c%3 == 0 && isPowerOfTwo(c/3)) { 19599 break 19600 } 19601 v.reset(OpARM64SLLconst) 19602 v.AuxInt = log2(c / 3) 19603 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19604 v0.AuxInt = 1 19605 v0.AddArg(x) 19606 v0.AddArg(x) 19607 v.AddArg(v0) 19608 return true 19609 } 19610 // match: (MUL x (MOVDconst [c])) 19611 // cond: c%5 == 0 && isPowerOfTwo(c/5) 19612 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 19613 for { 19614 _ = v.Args[1] 19615 x := v.Args[0] 19616 v_1 := v.Args[1] 19617 if v_1.Op != OpARM64MOVDconst { 19618 break 19619 } 19620 c := v_1.AuxInt 19621 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 19622 break 19623 } 19624 v.reset(OpARM64SLLconst) 19625 v.AuxInt = log2(c / 5) 19626 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19627 v0.AuxInt = 2 19628 v0.AddArg(x) 19629 v0.AddArg(x) 19630 v.AddArg(v0) 19631 return true 19632 } 19633 // match: (MUL (MOVDconst [c]) x) 19634 // cond: c%5 == 0 && isPowerOfTwo(c/5) 19635 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 19636 for { 19637 _ = v.Args[1] 19638 v_0 := v.Args[0] 19639 if v_0.Op != OpARM64MOVDconst { 19640 break 19641 } 19642 c := v_0.AuxInt 19643 x := v.Args[1] 19644 if !(c%5 == 0 && isPowerOfTwo(c/5)) { 19645 break 19646 } 19647 v.reset(OpARM64SLLconst) 19648 v.AuxInt = log2(c / 5) 19649 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 19650 v0.AuxInt = 2 19651 v0.AddArg(x) 19652 v0.AddArg(x) 19653 v.AddArg(v0) 19654 return 
true
	}
	// match: (MUL x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MUL_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MUL x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.AuxInt = log2(c / 9)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MUL (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		d := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	// match: (MUL (MOVDconst [d]) (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c*d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c * d
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MULW_0(v *Value) bool {
	// match: (MULW (NEG x) y)
	// cond:
	// result: (MNEGW x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		x := v_0.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MNEGW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (MULW y (NEG x))
	// cond:
// result: (MNEGW x y) 19817 for { 19818 _ = v.Args[1] 19819 y := v.Args[0] 19820 v_1 := v.Args[1] 19821 if v_1.Op != OpARM64NEG { 19822 break 19823 } 19824 x := v_1.Args[0] 19825 v.reset(OpARM64MNEGW) 19826 v.AddArg(x) 19827 v.AddArg(y) 19828 return true 19829 } 19830 // match: (MULW x (MOVDconst [c])) 19831 // cond: int32(c)==-1 19832 // result: (NEG x) 19833 for { 19834 _ = v.Args[1] 19835 x := v.Args[0] 19836 v_1 := v.Args[1] 19837 if v_1.Op != OpARM64MOVDconst { 19838 break 19839 } 19840 c := v_1.AuxInt 19841 if !(int32(c) == -1) { 19842 break 19843 } 19844 v.reset(OpARM64NEG) 19845 v.AddArg(x) 19846 return true 19847 } 19848 // match: (MULW (MOVDconst [c]) x) 19849 // cond: int32(c)==-1 19850 // result: (NEG x) 19851 for { 19852 _ = v.Args[1] 19853 v_0 := v.Args[0] 19854 if v_0.Op != OpARM64MOVDconst { 19855 break 19856 } 19857 c := v_0.AuxInt 19858 x := v.Args[1] 19859 if !(int32(c) == -1) { 19860 break 19861 } 19862 v.reset(OpARM64NEG) 19863 v.AddArg(x) 19864 return true 19865 } 19866 // match: (MULW _ (MOVDconst [c])) 19867 // cond: int32(c)==0 19868 // result: (MOVDconst [0]) 19869 for { 19870 _ = v.Args[1] 19871 v_1 := v.Args[1] 19872 if v_1.Op != OpARM64MOVDconst { 19873 break 19874 } 19875 c := v_1.AuxInt 19876 if !(int32(c) == 0) { 19877 break 19878 } 19879 v.reset(OpARM64MOVDconst) 19880 v.AuxInt = 0 19881 return true 19882 } 19883 // match: (MULW (MOVDconst [c]) _) 19884 // cond: int32(c)==0 19885 // result: (MOVDconst [0]) 19886 for { 19887 _ = v.Args[1] 19888 v_0 := v.Args[0] 19889 if v_0.Op != OpARM64MOVDconst { 19890 break 19891 } 19892 c := v_0.AuxInt 19893 if !(int32(c) == 0) { 19894 break 19895 } 19896 v.reset(OpARM64MOVDconst) 19897 v.AuxInt = 0 19898 return true 19899 } 19900 // match: (MULW x (MOVDconst [c])) 19901 // cond: int32(c)==1 19902 // result: x 19903 for { 19904 _ = v.Args[1] 19905 x := v.Args[0] 19906 v_1 := v.Args[1] 19907 if v_1.Op != OpARM64MOVDconst { 19908 break 19909 } 19910 c := v_1.AuxInt 19911 if !(int32(c) == 1) { 
19912 break 19913 } 19914 v.reset(OpCopy) 19915 v.Type = x.Type 19916 v.AddArg(x) 19917 return true 19918 } 19919 // match: (MULW (MOVDconst [c]) x) 19920 // cond: int32(c)==1 19921 // result: x 19922 for { 19923 _ = v.Args[1] 19924 v_0 := v.Args[0] 19925 if v_0.Op != OpARM64MOVDconst { 19926 break 19927 } 19928 c := v_0.AuxInt 19929 x := v.Args[1] 19930 if !(int32(c) == 1) { 19931 break 19932 } 19933 v.reset(OpCopy) 19934 v.Type = x.Type 19935 v.AddArg(x) 19936 return true 19937 } 19938 // match: (MULW x (MOVDconst [c])) 19939 // cond: isPowerOfTwo(c) 19940 // result: (SLLconst [log2(c)] x) 19941 for { 19942 _ = v.Args[1] 19943 x := v.Args[0] 19944 v_1 := v.Args[1] 19945 if v_1.Op != OpARM64MOVDconst { 19946 break 19947 } 19948 c := v_1.AuxInt 19949 if !(isPowerOfTwo(c)) { 19950 break 19951 } 19952 v.reset(OpARM64SLLconst) 19953 v.AuxInt = log2(c) 19954 v.AddArg(x) 19955 return true 19956 } 19957 // match: (MULW (MOVDconst [c]) x) 19958 // cond: isPowerOfTwo(c) 19959 // result: (SLLconst [log2(c)] x) 19960 for { 19961 _ = v.Args[1] 19962 v_0 := v.Args[0] 19963 if v_0.Op != OpARM64MOVDconst { 19964 break 19965 } 19966 c := v_0.AuxInt 19967 x := v.Args[1] 19968 if !(isPowerOfTwo(c)) { 19969 break 19970 } 19971 v.reset(OpARM64SLLconst) 19972 v.AuxInt = log2(c) 19973 v.AddArg(x) 19974 return true 19975 } 19976 return false 19977 } 19978 func rewriteValueARM64_OpARM64MULW_10(v *Value) bool { 19979 b := v.Block 19980 _ = b 19981 // match: (MULW x (MOVDconst [c])) 19982 // cond: isPowerOfTwo(c-1) && int32(c) >= 3 19983 // result: (ADDshiftLL x x [log2(c-1)]) 19984 for { 19985 _ = v.Args[1] 19986 x := v.Args[0] 19987 v_1 := v.Args[1] 19988 if v_1.Op != OpARM64MOVDconst { 19989 break 19990 } 19991 c := v_1.AuxInt 19992 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 19993 break 19994 } 19995 v.reset(OpARM64ADDshiftLL) 19996 v.AuxInt = log2(c - 1) 19997 v.AddArg(x) 19998 v.AddArg(x) 19999 return true 20000 } 20001 // match: (MULW (MOVDconst [c]) x) 20002 // cond: 
isPowerOfTwo(c-1) && int32(c) >= 3 20003 // result: (ADDshiftLL x x [log2(c-1)]) 20004 for { 20005 _ = v.Args[1] 20006 v_0 := v.Args[0] 20007 if v_0.Op != OpARM64MOVDconst { 20008 break 20009 } 20010 c := v_0.AuxInt 20011 x := v.Args[1] 20012 if !(isPowerOfTwo(c-1) && int32(c) >= 3) { 20013 break 20014 } 20015 v.reset(OpARM64ADDshiftLL) 20016 v.AuxInt = log2(c - 1) 20017 v.AddArg(x) 20018 v.AddArg(x) 20019 return true 20020 } 20021 // match: (MULW x (MOVDconst [c])) 20022 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 20023 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 20024 for { 20025 _ = v.Args[1] 20026 x := v.Args[0] 20027 v_1 := v.Args[1] 20028 if v_1.Op != OpARM64MOVDconst { 20029 break 20030 } 20031 c := v_1.AuxInt 20032 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 20033 break 20034 } 20035 v.reset(OpARM64ADDshiftLL) 20036 v.AuxInt = log2(c + 1) 20037 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20038 v0.AddArg(x) 20039 v.AddArg(v0) 20040 v.AddArg(x) 20041 return true 20042 } 20043 // match: (MULW (MOVDconst [c]) x) 20044 // cond: isPowerOfTwo(c+1) && int32(c) >= 7 20045 // result: (ADDshiftLL (NEG <x.Type> x) x [log2(c+1)]) 20046 for { 20047 _ = v.Args[1] 20048 v_0 := v.Args[0] 20049 if v_0.Op != OpARM64MOVDconst { 20050 break 20051 } 20052 c := v_0.AuxInt 20053 x := v.Args[1] 20054 if !(isPowerOfTwo(c+1) && int32(c) >= 7) { 20055 break 20056 } 20057 v.reset(OpARM64ADDshiftLL) 20058 v.AuxInt = log2(c + 1) 20059 v0 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20060 v0.AddArg(x) 20061 v.AddArg(v0) 20062 v.AddArg(x) 20063 return true 20064 } 20065 // match: (MULW x (MOVDconst [c])) 20066 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 20067 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 20068 for { 20069 _ = v.Args[1] 20070 x := v.Args[0] 20071 v_1 := v.Args[1] 20072 if v_1.Op != OpARM64MOVDconst { 20073 break 20074 } 20075 c := v_1.AuxInt 20076 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 20077 break 20078 } 20079 
v.reset(OpARM64SLLconst) 20080 v.AuxInt = log2(c / 3) 20081 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20082 v0.AuxInt = 1 20083 v0.AddArg(x) 20084 v0.AddArg(x) 20085 v.AddArg(v0) 20086 return true 20087 } 20088 // match: (MULW (MOVDconst [c]) x) 20089 // cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c) 20090 // result: (SLLconst [log2(c/3)] (ADDshiftLL <x.Type> x x [1])) 20091 for { 20092 _ = v.Args[1] 20093 v_0 := v.Args[0] 20094 if v_0.Op != OpARM64MOVDconst { 20095 break 20096 } 20097 c := v_0.AuxInt 20098 x := v.Args[1] 20099 if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) { 20100 break 20101 } 20102 v.reset(OpARM64SLLconst) 20103 v.AuxInt = log2(c / 3) 20104 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20105 v0.AuxInt = 1 20106 v0.AddArg(x) 20107 v0.AddArg(x) 20108 v.AddArg(v0) 20109 return true 20110 } 20111 // match: (MULW x (MOVDconst [c])) 20112 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 20113 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 20114 for { 20115 _ = v.Args[1] 20116 x := v.Args[0] 20117 v_1 := v.Args[1] 20118 if v_1.Op != OpARM64MOVDconst { 20119 break 20120 } 20121 c := v_1.AuxInt 20122 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 20123 break 20124 } 20125 v.reset(OpARM64SLLconst) 20126 v.AuxInt = log2(c / 5) 20127 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20128 v0.AuxInt = 2 20129 v0.AddArg(x) 20130 v0.AddArg(x) 20131 v.AddArg(v0) 20132 return true 20133 } 20134 // match: (MULW (MOVDconst [c]) x) 20135 // cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c) 20136 // result: (SLLconst [log2(c/5)] (ADDshiftLL <x.Type> x x [2])) 20137 for { 20138 _ = v.Args[1] 20139 v_0 := v.Args[0] 20140 if v_0.Op != OpARM64MOVDconst { 20141 break 20142 } 20143 c := v_0.AuxInt 20144 x := v.Args[1] 20145 if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) { 20146 break 20147 } 20148 v.reset(OpARM64SLLconst) 20149 v.AuxInt = log2(c / 5) 20150 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20151 
v0.AuxInt = 2 20152 v0.AddArg(x) 20153 v0.AddArg(x) 20154 v.AddArg(v0) 20155 return true 20156 } 20157 // match: (MULW x (MOVDconst [c])) 20158 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 20159 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 20160 for { 20161 _ = v.Args[1] 20162 x := v.Args[0] 20163 v_1 := v.Args[1] 20164 if v_1.Op != OpARM64MOVDconst { 20165 break 20166 } 20167 c := v_1.AuxInt 20168 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 20169 break 20170 } 20171 v.reset(OpARM64SLLconst) 20172 v.AuxInt = log2(c / 7) 20173 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20174 v0.AuxInt = 3 20175 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20176 v1.AddArg(x) 20177 v0.AddArg(v1) 20178 v0.AddArg(x) 20179 v.AddArg(v0) 20180 return true 20181 } 20182 // match: (MULW (MOVDconst [c]) x) 20183 // cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c) 20184 // result: (SLLconst [log2(c/7)] (ADDshiftLL <x.Type> (NEG <x.Type> x) x [3])) 20185 for { 20186 _ = v.Args[1] 20187 v_0 := v.Args[0] 20188 if v_0.Op != OpARM64MOVDconst { 20189 break 20190 } 20191 c := v_0.AuxInt 20192 x := v.Args[1] 20193 if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) { 20194 break 20195 } 20196 v.reset(OpARM64SLLconst) 20197 v.AuxInt = log2(c / 7) 20198 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20199 v0.AuxInt = 3 20200 v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type) 20201 v1.AddArg(x) 20202 v0.AddArg(v1) 20203 v0.AddArg(x) 20204 v.AddArg(v0) 20205 return true 20206 } 20207 return false 20208 } 20209 func rewriteValueARM64_OpARM64MULW_20(v *Value) bool { 20210 b := v.Block 20211 _ = b 20212 // match: (MULW x (MOVDconst [c])) 20213 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 20214 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 20215 for { 20216 _ = v.Args[1] 20217 x := v.Args[0] 20218 v_1 := v.Args[1] 20219 if v_1.Op != OpARM64MOVDconst { 20220 break 20221 } 20222 c := v_1.AuxInt 20223 if !(c%9 == 0 && 
isPowerOfTwo(c/9) && is32Bit(c)) { 20224 break 20225 } 20226 v.reset(OpARM64SLLconst) 20227 v.AuxInt = log2(c / 9) 20228 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20229 v0.AuxInt = 3 20230 v0.AddArg(x) 20231 v0.AddArg(x) 20232 v.AddArg(v0) 20233 return true 20234 } 20235 // match: (MULW (MOVDconst [c]) x) 20236 // cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c) 20237 // result: (SLLconst [log2(c/9)] (ADDshiftLL <x.Type> x x [3])) 20238 for { 20239 _ = v.Args[1] 20240 v_0 := v.Args[0] 20241 if v_0.Op != OpARM64MOVDconst { 20242 break 20243 } 20244 c := v_0.AuxInt 20245 x := v.Args[1] 20246 if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) { 20247 break 20248 } 20249 v.reset(OpARM64SLLconst) 20250 v.AuxInt = log2(c / 9) 20251 v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type) 20252 v0.AuxInt = 3 20253 v0.AddArg(x) 20254 v0.AddArg(x) 20255 v.AddArg(v0) 20256 return true 20257 } 20258 // match: (MULW (MOVDconst [c]) (MOVDconst [d])) 20259 // cond: 20260 // result: (MOVDconst [int64(int32(c)*int32(d))]) 20261 for { 20262 _ = v.Args[1] 20263 v_0 := v.Args[0] 20264 if v_0.Op != OpARM64MOVDconst { 20265 break 20266 } 20267 c := v_0.AuxInt 20268 v_1 := v.Args[1] 20269 if v_1.Op != OpARM64MOVDconst { 20270 break 20271 } 20272 d := v_1.AuxInt 20273 v.reset(OpARM64MOVDconst) 20274 v.AuxInt = int64(int32(c) * int32(d)) 20275 return true 20276 } 20277 // match: (MULW (MOVDconst [d]) (MOVDconst [c])) 20278 // cond: 20279 // result: (MOVDconst [int64(int32(c)*int32(d))]) 20280 for { 20281 _ = v.Args[1] 20282 v_0 := v.Args[0] 20283 if v_0.Op != OpARM64MOVDconst { 20284 break 20285 } 20286 d := v_0.AuxInt 20287 v_1 := v.Args[1] 20288 if v_1.Op != OpARM64MOVDconst { 20289 break 20290 } 20291 c := v_1.AuxInt 20292 v.reset(OpARM64MOVDconst) 20293 v.AuxInt = int64(int32(c) * int32(d)) 20294 return true 20295 } 20296 return false 20297 } 20298 func rewriteValueARM64_OpARM64MVN_0(v *Value) bool { 20299 // match: (MVN (MOVDconst [c])) 20300 // cond: 20301 // result: (MOVDconst 
[^c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^c
		return true
	}
	// match: (MVN x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftLL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SLLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftLL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (MVN x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (MVNshiftRA [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRAconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64MVNshiftRA)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MVNshiftLL_0(v *Value) bool {
	// match: (MVNshiftLL (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MVNshiftRA_0(v *Value) bool {
	// match: (MVNshiftRA (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^(c >> uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64MVNshiftRL_0(v *Value) bool {
	// match: (MVNshiftRL (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64NEG_0(v *Value) bool {
	// match: (NEG (MUL x y))
	// cond:
	// result: (MNEG x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MUL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64MNEG)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (NEG (MULW x y))
	// cond:
	// result: (MNEGW x y)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MULW {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpARM64MNEGW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (NEG (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [-c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -c
		return true
	}
	// match: (NEG x:(SLLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftLL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SLLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftLL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRLconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRL [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRLconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRL)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	// match: (NEG x:(SRAconst [c] y))
	// cond: clobberIfDead(x)
	// result: (NEGshiftRA [c] y)
	for {
		x := v.Args[0]
		if x.Op != OpARM64SRAconst {
			break
		}
		c := x.AuxInt
		y := x.Args[0]
		if !(clobberIfDead(x)) {
			break
		}
		v.reset(OpARM64NEGshiftRA)
		v.AuxInt = c
		v.AddArg(y)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64NEGshiftLL_0(v *Value) bool {
	// match: (NEGshiftLL (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [-int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(uint64(c) << uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64NEGshiftRA_0(v *Value) bool {
	// match: (NEGshiftRA (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [-(c>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -(c >> uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64NEGshiftRL_0(v *Value) bool {
	// match: (NEGshiftRL (MOVDconst [c]) [d])
	// cond:
	// result: (MOVDconst [-int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -int64(uint64(c) >> uint64(d))
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64NotEqual_0(v *Value) bool {
	// match: (NotEqual (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (NotEqual (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (NotEqual (InvertFlags x))
	// cond:
	// result: (NotEqual x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64NotEqual)
		v.AddArg(x)
		return true
	}
	return false
}
func rewriteValueARM64_OpARM64OR_0(v *Value) bool {
	// match: (OR x (MOVDconst
[c])) 20650 // cond: 20651 // result: (ORconst [c] x) 20652 for { 20653 _ = v.Args[1] 20654 x := v.Args[0] 20655 v_1 := v.Args[1] 20656 if v_1.Op != OpARM64MOVDconst { 20657 break 20658 } 20659 c := v_1.AuxInt 20660 v.reset(OpARM64ORconst) 20661 v.AuxInt = c 20662 v.AddArg(x) 20663 return true 20664 } 20665 // match: (OR (MOVDconst [c]) x) 20666 // cond: 20667 // result: (ORconst [c] x) 20668 for { 20669 _ = v.Args[1] 20670 v_0 := v.Args[0] 20671 if v_0.Op != OpARM64MOVDconst { 20672 break 20673 } 20674 c := v_0.AuxInt 20675 x := v.Args[1] 20676 v.reset(OpARM64ORconst) 20677 v.AuxInt = c 20678 v.AddArg(x) 20679 return true 20680 } 20681 // match: (OR x x) 20682 // cond: 20683 // result: x 20684 for { 20685 _ = v.Args[1] 20686 x := v.Args[0] 20687 if x != v.Args[1] { 20688 break 20689 } 20690 v.reset(OpCopy) 20691 v.Type = x.Type 20692 v.AddArg(x) 20693 return true 20694 } 20695 // match: (OR x (MVN y)) 20696 // cond: 20697 // result: (ORN x y) 20698 for { 20699 _ = v.Args[1] 20700 x := v.Args[0] 20701 v_1 := v.Args[1] 20702 if v_1.Op != OpARM64MVN { 20703 break 20704 } 20705 y := v_1.Args[0] 20706 v.reset(OpARM64ORN) 20707 v.AddArg(x) 20708 v.AddArg(y) 20709 return true 20710 } 20711 // match: (OR (MVN y) x) 20712 // cond: 20713 // result: (ORN x y) 20714 for { 20715 _ = v.Args[1] 20716 v_0 := v.Args[0] 20717 if v_0.Op != OpARM64MVN { 20718 break 20719 } 20720 y := v_0.Args[0] 20721 x := v.Args[1] 20722 v.reset(OpARM64ORN) 20723 v.AddArg(x) 20724 v.AddArg(y) 20725 return true 20726 } 20727 // match: (OR x0 x1:(SLLconst [c] y)) 20728 // cond: clobberIfDead(x1) 20729 // result: (ORshiftLL x0 y [c]) 20730 for { 20731 _ = v.Args[1] 20732 x0 := v.Args[0] 20733 x1 := v.Args[1] 20734 if x1.Op != OpARM64SLLconst { 20735 break 20736 } 20737 c := x1.AuxInt 20738 y := x1.Args[0] 20739 if !(clobberIfDead(x1)) { 20740 break 20741 } 20742 v.reset(OpARM64ORshiftLL) 20743 v.AuxInt = c 20744 v.AddArg(x0) 20745 v.AddArg(y) 20746 return true 20747 } 20748 // match: (OR x1:(SLLconst 
[c] y) x0) 20749 // cond: clobberIfDead(x1) 20750 // result: (ORshiftLL x0 y [c]) 20751 for { 20752 _ = v.Args[1] 20753 x1 := v.Args[0] 20754 if x1.Op != OpARM64SLLconst { 20755 break 20756 } 20757 c := x1.AuxInt 20758 y := x1.Args[0] 20759 x0 := v.Args[1] 20760 if !(clobberIfDead(x1)) { 20761 break 20762 } 20763 v.reset(OpARM64ORshiftLL) 20764 v.AuxInt = c 20765 v.AddArg(x0) 20766 v.AddArg(y) 20767 return true 20768 } 20769 // match: (OR x0 x1:(SRLconst [c] y)) 20770 // cond: clobberIfDead(x1) 20771 // result: (ORshiftRL x0 y [c]) 20772 for { 20773 _ = v.Args[1] 20774 x0 := v.Args[0] 20775 x1 := v.Args[1] 20776 if x1.Op != OpARM64SRLconst { 20777 break 20778 } 20779 c := x1.AuxInt 20780 y := x1.Args[0] 20781 if !(clobberIfDead(x1)) { 20782 break 20783 } 20784 v.reset(OpARM64ORshiftRL) 20785 v.AuxInt = c 20786 v.AddArg(x0) 20787 v.AddArg(y) 20788 return true 20789 } 20790 // match: (OR x1:(SRLconst [c] y) x0) 20791 // cond: clobberIfDead(x1) 20792 // result: (ORshiftRL x0 y [c]) 20793 for { 20794 _ = v.Args[1] 20795 x1 := v.Args[0] 20796 if x1.Op != OpARM64SRLconst { 20797 break 20798 } 20799 c := x1.AuxInt 20800 y := x1.Args[0] 20801 x0 := v.Args[1] 20802 if !(clobberIfDead(x1)) { 20803 break 20804 } 20805 v.reset(OpARM64ORshiftRL) 20806 v.AuxInt = c 20807 v.AddArg(x0) 20808 v.AddArg(y) 20809 return true 20810 } 20811 // match: (OR x0 x1:(SRAconst [c] y)) 20812 // cond: clobberIfDead(x1) 20813 // result: (ORshiftRA x0 y [c]) 20814 for { 20815 _ = v.Args[1] 20816 x0 := v.Args[0] 20817 x1 := v.Args[1] 20818 if x1.Op != OpARM64SRAconst { 20819 break 20820 } 20821 c := x1.AuxInt 20822 y := x1.Args[0] 20823 if !(clobberIfDead(x1)) { 20824 break 20825 } 20826 v.reset(OpARM64ORshiftRA) 20827 v.AuxInt = c 20828 v.AddArg(x0) 20829 v.AddArg(y) 20830 return true 20831 } 20832 return false 20833 } 20834 func rewriteValueARM64_OpARM64OR_10(v *Value) bool { 20835 b := v.Block 20836 _ = b 20837 typ := &b.Func.Config.Types 20838 _ = typ 20839 // match: (OR x1:(SRAconst [c] y) x0) 
20840 // cond: clobberIfDead(x1) 20841 // result: (ORshiftRA x0 y [c]) 20842 for { 20843 _ = v.Args[1] 20844 x1 := v.Args[0] 20845 if x1.Op != OpARM64SRAconst { 20846 break 20847 } 20848 c := x1.AuxInt 20849 y := x1.Args[0] 20850 x0 := v.Args[1] 20851 if !(clobberIfDead(x1)) { 20852 break 20853 } 20854 v.reset(OpARM64ORshiftRA) 20855 v.AuxInt = c 20856 v.AddArg(x0) 20857 v.AddArg(y) 20858 return true 20859 } 20860 // match: (OR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 20861 // cond: cc.(Op) == OpARM64LessThanU 20862 // result: (ROR x (NEG <t> y)) 20863 for { 20864 _ = v.Args[1] 20865 v_0 := v.Args[0] 20866 if v_0.Op != OpARM64SLL { 20867 break 20868 } 20869 _ = v_0.Args[1] 20870 x := v_0.Args[0] 20871 v_0_1 := v_0.Args[1] 20872 if v_0_1.Op != OpARM64ANDconst { 20873 break 20874 } 20875 t := v_0_1.Type 20876 if v_0_1.AuxInt != 63 { 20877 break 20878 } 20879 y := v_0_1.Args[0] 20880 v_1 := v.Args[1] 20881 if v_1.Op != OpARM64CSEL0 { 20882 break 20883 } 20884 if v_1.Type != typ.UInt64 { 20885 break 20886 } 20887 cc := v_1.Aux 20888 _ = v_1.Args[1] 20889 v_1_0 := v_1.Args[0] 20890 if v_1_0.Op != OpARM64SRL { 20891 break 20892 } 20893 if v_1_0.Type != typ.UInt64 { 20894 break 20895 } 20896 _ = v_1_0.Args[1] 20897 if x != v_1_0.Args[0] { 20898 break 20899 } 20900 v_1_0_1 := v_1_0.Args[1] 20901 if v_1_0_1.Op != OpARM64SUB { 20902 break 20903 } 20904 if v_1_0_1.Type != t { 20905 break 20906 } 20907 _ = v_1_0_1.Args[1] 20908 v_1_0_1_0 := v_1_0_1.Args[0] 20909 if v_1_0_1_0.Op != OpARM64MOVDconst { 20910 break 20911 } 20912 if v_1_0_1_0.AuxInt != 64 { 20913 break 20914 } 20915 v_1_0_1_1 := v_1_0_1.Args[1] 20916 if v_1_0_1_1.Op != OpARM64ANDconst { 20917 break 20918 } 20919 if v_1_0_1_1.Type != t { 20920 break 20921 } 20922 if v_1_0_1_1.AuxInt != 63 { 20923 break 20924 } 20925 if y != v_1_0_1_1.Args[0] { 20926 break 20927 } 
20928 v_1_1 := v_1.Args[1] 20929 if v_1_1.Op != OpARM64CMPconst { 20930 break 20931 } 20932 if v_1_1.AuxInt != 64 { 20933 break 20934 } 20935 v_1_1_0 := v_1_1.Args[0] 20936 if v_1_1_0.Op != OpARM64SUB { 20937 break 20938 } 20939 if v_1_1_0.Type != t { 20940 break 20941 } 20942 _ = v_1_1_0.Args[1] 20943 v_1_1_0_0 := v_1_1_0.Args[0] 20944 if v_1_1_0_0.Op != OpARM64MOVDconst { 20945 break 20946 } 20947 if v_1_1_0_0.AuxInt != 64 { 20948 break 20949 } 20950 v_1_1_0_1 := v_1_1_0.Args[1] 20951 if v_1_1_0_1.Op != OpARM64ANDconst { 20952 break 20953 } 20954 if v_1_1_0_1.Type != t { 20955 break 20956 } 20957 if v_1_1_0_1.AuxInt != 63 { 20958 break 20959 } 20960 if y != v_1_1_0_1.Args[0] { 20961 break 20962 } 20963 if !(cc.(Op) == OpARM64LessThanU) { 20964 break 20965 } 20966 v.reset(OpARM64ROR) 20967 v.AddArg(x) 20968 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 20969 v0.AddArg(y) 20970 v.AddArg(v0) 20971 return true 20972 } 20973 // match: (OR (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 20974 // cond: cc.(Op) == OpARM64LessThanU 20975 // result: (ROR x (NEG <t> y)) 20976 for { 20977 _ = v.Args[1] 20978 v_0 := v.Args[0] 20979 if v_0.Op != OpARM64CSEL0 { 20980 break 20981 } 20982 if v_0.Type != typ.UInt64 { 20983 break 20984 } 20985 cc := v_0.Aux 20986 _ = v_0.Args[1] 20987 v_0_0 := v_0.Args[0] 20988 if v_0_0.Op != OpARM64SRL { 20989 break 20990 } 20991 if v_0_0.Type != typ.UInt64 { 20992 break 20993 } 20994 _ = v_0_0.Args[1] 20995 x := v_0_0.Args[0] 20996 v_0_0_1 := v_0_0.Args[1] 20997 if v_0_0_1.Op != OpARM64SUB { 20998 break 20999 } 21000 t := v_0_0_1.Type 21001 _ = v_0_0_1.Args[1] 21002 v_0_0_1_0 := v_0_0_1.Args[0] 21003 if v_0_0_1_0.Op != OpARM64MOVDconst { 21004 break 21005 } 21006 if v_0_0_1_0.AuxInt != 64 { 21007 break 21008 } 21009 v_0_0_1_1 := v_0_0_1.Args[1] 21010 if v_0_0_1_1.Op != OpARM64ANDconst { 21011 break 21012 } 
21013 if v_0_0_1_1.Type != t { 21014 break 21015 } 21016 if v_0_0_1_1.AuxInt != 63 { 21017 break 21018 } 21019 y := v_0_0_1_1.Args[0] 21020 v_0_1 := v_0.Args[1] 21021 if v_0_1.Op != OpARM64CMPconst { 21022 break 21023 } 21024 if v_0_1.AuxInt != 64 { 21025 break 21026 } 21027 v_0_1_0 := v_0_1.Args[0] 21028 if v_0_1_0.Op != OpARM64SUB { 21029 break 21030 } 21031 if v_0_1_0.Type != t { 21032 break 21033 } 21034 _ = v_0_1_0.Args[1] 21035 v_0_1_0_0 := v_0_1_0.Args[0] 21036 if v_0_1_0_0.Op != OpARM64MOVDconst { 21037 break 21038 } 21039 if v_0_1_0_0.AuxInt != 64 { 21040 break 21041 } 21042 v_0_1_0_1 := v_0_1_0.Args[1] 21043 if v_0_1_0_1.Op != OpARM64ANDconst { 21044 break 21045 } 21046 if v_0_1_0_1.Type != t { 21047 break 21048 } 21049 if v_0_1_0_1.AuxInt != 63 { 21050 break 21051 } 21052 if y != v_0_1_0_1.Args[0] { 21053 break 21054 } 21055 v_1 := v.Args[1] 21056 if v_1.Op != OpARM64SLL { 21057 break 21058 } 21059 _ = v_1.Args[1] 21060 if x != v_1.Args[0] { 21061 break 21062 } 21063 v_1_1 := v_1.Args[1] 21064 if v_1_1.Op != OpARM64ANDconst { 21065 break 21066 } 21067 if v_1_1.Type != t { 21068 break 21069 } 21070 if v_1_1.AuxInt != 63 { 21071 break 21072 } 21073 if y != v_1_1.Args[0] { 21074 break 21075 } 21076 if !(cc.(Op) == OpARM64LessThanU) { 21077 break 21078 } 21079 v.reset(OpARM64ROR) 21080 v.AddArg(x) 21081 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21082 v0.AddArg(y) 21083 v.AddArg(v0) 21084 return true 21085 } 21086 // match: (OR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 21087 // cond: cc.(Op) == OpARM64LessThanU 21088 // result: (ROR x y) 21089 for { 21090 _ = v.Args[1] 21091 v_0 := v.Args[0] 21092 if v_0.Op != OpARM64SRL { 21093 break 21094 } 21095 if v_0.Type != typ.UInt64 { 21096 break 21097 } 21098 _ = v_0.Args[1] 21099 x := v_0.Args[0] 21100 v_0_1 := v_0.Args[1] 21101 if v_0_1.Op != OpARM64ANDconst { 21102 
break 21103 } 21104 t := v_0_1.Type 21105 if v_0_1.AuxInt != 63 { 21106 break 21107 } 21108 y := v_0_1.Args[0] 21109 v_1 := v.Args[1] 21110 if v_1.Op != OpARM64CSEL0 { 21111 break 21112 } 21113 if v_1.Type != typ.UInt64 { 21114 break 21115 } 21116 cc := v_1.Aux 21117 _ = v_1.Args[1] 21118 v_1_0 := v_1.Args[0] 21119 if v_1_0.Op != OpARM64SLL { 21120 break 21121 } 21122 _ = v_1_0.Args[1] 21123 if x != v_1_0.Args[0] { 21124 break 21125 } 21126 v_1_0_1 := v_1_0.Args[1] 21127 if v_1_0_1.Op != OpARM64SUB { 21128 break 21129 } 21130 if v_1_0_1.Type != t { 21131 break 21132 } 21133 _ = v_1_0_1.Args[1] 21134 v_1_0_1_0 := v_1_0_1.Args[0] 21135 if v_1_0_1_0.Op != OpARM64MOVDconst { 21136 break 21137 } 21138 if v_1_0_1_0.AuxInt != 64 { 21139 break 21140 } 21141 v_1_0_1_1 := v_1_0_1.Args[1] 21142 if v_1_0_1_1.Op != OpARM64ANDconst { 21143 break 21144 } 21145 if v_1_0_1_1.Type != t { 21146 break 21147 } 21148 if v_1_0_1_1.AuxInt != 63 { 21149 break 21150 } 21151 if y != v_1_0_1_1.Args[0] { 21152 break 21153 } 21154 v_1_1 := v_1.Args[1] 21155 if v_1_1.Op != OpARM64CMPconst { 21156 break 21157 } 21158 if v_1_1.AuxInt != 64 { 21159 break 21160 } 21161 v_1_1_0 := v_1_1.Args[0] 21162 if v_1_1_0.Op != OpARM64SUB { 21163 break 21164 } 21165 if v_1_1_0.Type != t { 21166 break 21167 } 21168 _ = v_1_1_0.Args[1] 21169 v_1_1_0_0 := v_1_1_0.Args[0] 21170 if v_1_1_0_0.Op != OpARM64MOVDconst { 21171 break 21172 } 21173 if v_1_1_0_0.AuxInt != 64 { 21174 break 21175 } 21176 v_1_1_0_1 := v_1_1_0.Args[1] 21177 if v_1_1_0_1.Op != OpARM64ANDconst { 21178 break 21179 } 21180 if v_1_1_0_1.Type != t { 21181 break 21182 } 21183 if v_1_1_0_1.AuxInt != 63 { 21184 break 21185 } 21186 if y != v_1_1_0_1.Args[0] { 21187 break 21188 } 21189 if !(cc.(Op) == OpARM64LessThanU) { 21190 break 21191 } 21192 v.reset(OpARM64ROR) 21193 v.AddArg(x) 21194 v.AddArg(y) 21195 return true 21196 } 21197 // match: (OR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> 
(MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 21198 // cond: cc.(Op) == OpARM64LessThanU 21199 // result: (ROR x y) 21200 for { 21201 _ = v.Args[1] 21202 v_0 := v.Args[0] 21203 if v_0.Op != OpARM64CSEL0 { 21204 break 21205 } 21206 if v_0.Type != typ.UInt64 { 21207 break 21208 } 21209 cc := v_0.Aux 21210 _ = v_0.Args[1] 21211 v_0_0 := v_0.Args[0] 21212 if v_0_0.Op != OpARM64SLL { 21213 break 21214 } 21215 _ = v_0_0.Args[1] 21216 x := v_0_0.Args[0] 21217 v_0_0_1 := v_0_0.Args[1] 21218 if v_0_0_1.Op != OpARM64SUB { 21219 break 21220 } 21221 t := v_0_0_1.Type 21222 _ = v_0_0_1.Args[1] 21223 v_0_0_1_0 := v_0_0_1.Args[0] 21224 if v_0_0_1_0.Op != OpARM64MOVDconst { 21225 break 21226 } 21227 if v_0_0_1_0.AuxInt != 64 { 21228 break 21229 } 21230 v_0_0_1_1 := v_0_0_1.Args[1] 21231 if v_0_0_1_1.Op != OpARM64ANDconst { 21232 break 21233 } 21234 if v_0_0_1_1.Type != t { 21235 break 21236 } 21237 if v_0_0_1_1.AuxInt != 63 { 21238 break 21239 } 21240 y := v_0_0_1_1.Args[0] 21241 v_0_1 := v_0.Args[1] 21242 if v_0_1.Op != OpARM64CMPconst { 21243 break 21244 } 21245 if v_0_1.AuxInt != 64 { 21246 break 21247 } 21248 v_0_1_0 := v_0_1.Args[0] 21249 if v_0_1_0.Op != OpARM64SUB { 21250 break 21251 } 21252 if v_0_1_0.Type != t { 21253 break 21254 } 21255 _ = v_0_1_0.Args[1] 21256 v_0_1_0_0 := v_0_1_0.Args[0] 21257 if v_0_1_0_0.Op != OpARM64MOVDconst { 21258 break 21259 } 21260 if v_0_1_0_0.AuxInt != 64 { 21261 break 21262 } 21263 v_0_1_0_1 := v_0_1_0.Args[1] 21264 if v_0_1_0_1.Op != OpARM64ANDconst { 21265 break 21266 } 21267 if v_0_1_0_1.Type != t { 21268 break 21269 } 21270 if v_0_1_0_1.AuxInt != 63 { 21271 break 21272 } 21273 if y != v_0_1_0_1.Args[0] { 21274 break 21275 } 21276 v_1 := v.Args[1] 21277 if v_1.Op != OpARM64SRL { 21278 break 21279 } 21280 if v_1.Type != typ.UInt64 { 21281 break 21282 } 21283 _ = v_1.Args[1] 21284 if x != v_1.Args[0] { 21285 break 21286 } 21287 v_1_1 := v_1.Args[1] 21288 if v_1_1.Op != OpARM64ANDconst { 21289 break 
21290 } 21291 if v_1_1.Type != t { 21292 break 21293 } 21294 if v_1_1.AuxInt != 63 { 21295 break 21296 } 21297 if y != v_1_1.Args[0] { 21298 break 21299 } 21300 if !(cc.(Op) == OpARM64LessThanU) { 21301 break 21302 } 21303 v.reset(OpARM64ROR) 21304 v.AddArg(x) 21305 v.AddArg(y) 21306 return true 21307 } 21308 // match: (OR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 21309 // cond: cc.(Op) == OpARM64LessThanU 21310 // result: (RORW x (NEG <t> y)) 21311 for { 21312 _ = v.Args[1] 21313 v_0 := v.Args[0] 21314 if v_0.Op != OpARM64SLL { 21315 break 21316 } 21317 _ = v_0.Args[1] 21318 x := v_0.Args[0] 21319 v_0_1 := v_0.Args[1] 21320 if v_0_1.Op != OpARM64ANDconst { 21321 break 21322 } 21323 t := v_0_1.Type 21324 if v_0_1.AuxInt != 31 { 21325 break 21326 } 21327 y := v_0_1.Args[0] 21328 v_1 := v.Args[1] 21329 if v_1.Op != OpARM64CSEL0 { 21330 break 21331 } 21332 if v_1.Type != typ.UInt32 { 21333 break 21334 } 21335 cc := v_1.Aux 21336 _ = v_1.Args[1] 21337 v_1_0 := v_1.Args[0] 21338 if v_1_0.Op != OpARM64SRL { 21339 break 21340 } 21341 if v_1_0.Type != typ.UInt32 { 21342 break 21343 } 21344 _ = v_1_0.Args[1] 21345 v_1_0_0 := v_1_0.Args[0] 21346 if v_1_0_0.Op != OpARM64MOVWUreg { 21347 break 21348 } 21349 if x != v_1_0_0.Args[0] { 21350 break 21351 } 21352 v_1_0_1 := v_1_0.Args[1] 21353 if v_1_0_1.Op != OpARM64SUB { 21354 break 21355 } 21356 if v_1_0_1.Type != t { 21357 break 21358 } 21359 _ = v_1_0_1.Args[1] 21360 v_1_0_1_0 := v_1_0_1.Args[0] 21361 if v_1_0_1_0.Op != OpARM64MOVDconst { 21362 break 21363 } 21364 if v_1_0_1_0.AuxInt != 32 { 21365 break 21366 } 21367 v_1_0_1_1 := v_1_0_1.Args[1] 21368 if v_1_0_1_1.Op != OpARM64ANDconst { 21369 break 21370 } 21371 if v_1_0_1_1.Type != t { 21372 break 21373 } 21374 if v_1_0_1_1.AuxInt != 31 { 21375 break 21376 } 21377 if y != v_1_0_1_1.Args[0] { 21378 break 21379 } 21380 
v_1_1 := v_1.Args[1] 21381 if v_1_1.Op != OpARM64CMPconst { 21382 break 21383 } 21384 if v_1_1.AuxInt != 64 { 21385 break 21386 } 21387 v_1_1_0 := v_1_1.Args[0] 21388 if v_1_1_0.Op != OpARM64SUB { 21389 break 21390 } 21391 if v_1_1_0.Type != t { 21392 break 21393 } 21394 _ = v_1_1_0.Args[1] 21395 v_1_1_0_0 := v_1_1_0.Args[0] 21396 if v_1_1_0_0.Op != OpARM64MOVDconst { 21397 break 21398 } 21399 if v_1_1_0_0.AuxInt != 32 { 21400 break 21401 } 21402 v_1_1_0_1 := v_1_1_0.Args[1] 21403 if v_1_1_0_1.Op != OpARM64ANDconst { 21404 break 21405 } 21406 if v_1_1_0_1.Type != t { 21407 break 21408 } 21409 if v_1_1_0_1.AuxInt != 31 { 21410 break 21411 } 21412 if y != v_1_1_0_1.Args[0] { 21413 break 21414 } 21415 if !(cc.(Op) == OpARM64LessThanU) { 21416 break 21417 } 21418 v.reset(OpARM64RORW) 21419 v.AddArg(x) 21420 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21421 v0.AddArg(y) 21422 v.AddArg(v0) 21423 return true 21424 } 21425 // match: (OR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 21426 // cond: cc.(Op) == OpARM64LessThanU 21427 // result: (RORW x (NEG <t> y)) 21428 for { 21429 _ = v.Args[1] 21430 v_0 := v.Args[0] 21431 if v_0.Op != OpARM64CSEL0 { 21432 break 21433 } 21434 if v_0.Type != typ.UInt32 { 21435 break 21436 } 21437 cc := v_0.Aux 21438 _ = v_0.Args[1] 21439 v_0_0 := v_0.Args[0] 21440 if v_0_0.Op != OpARM64SRL { 21441 break 21442 } 21443 if v_0_0.Type != typ.UInt32 { 21444 break 21445 } 21446 _ = v_0_0.Args[1] 21447 v_0_0_0 := v_0_0.Args[0] 21448 if v_0_0_0.Op != OpARM64MOVWUreg { 21449 break 21450 } 21451 x := v_0_0_0.Args[0] 21452 v_0_0_1 := v_0_0.Args[1] 21453 if v_0_0_1.Op != OpARM64SUB { 21454 break 21455 } 21456 t := v_0_0_1.Type 21457 _ = v_0_0_1.Args[1] 21458 v_0_0_1_0 := v_0_0_1.Args[0] 21459 if v_0_0_1_0.Op != OpARM64MOVDconst { 21460 break 21461 } 21462 if v_0_0_1_0.AuxInt != 32 { 21463 break 21464 } 
21465 v_0_0_1_1 := v_0_0_1.Args[1] 21466 if v_0_0_1_1.Op != OpARM64ANDconst { 21467 break 21468 } 21469 if v_0_0_1_1.Type != t { 21470 break 21471 } 21472 if v_0_0_1_1.AuxInt != 31 { 21473 break 21474 } 21475 y := v_0_0_1_1.Args[0] 21476 v_0_1 := v_0.Args[1] 21477 if v_0_1.Op != OpARM64CMPconst { 21478 break 21479 } 21480 if v_0_1.AuxInt != 64 { 21481 break 21482 } 21483 v_0_1_0 := v_0_1.Args[0] 21484 if v_0_1_0.Op != OpARM64SUB { 21485 break 21486 } 21487 if v_0_1_0.Type != t { 21488 break 21489 } 21490 _ = v_0_1_0.Args[1] 21491 v_0_1_0_0 := v_0_1_0.Args[0] 21492 if v_0_1_0_0.Op != OpARM64MOVDconst { 21493 break 21494 } 21495 if v_0_1_0_0.AuxInt != 32 { 21496 break 21497 } 21498 v_0_1_0_1 := v_0_1_0.Args[1] 21499 if v_0_1_0_1.Op != OpARM64ANDconst { 21500 break 21501 } 21502 if v_0_1_0_1.Type != t { 21503 break 21504 } 21505 if v_0_1_0_1.AuxInt != 31 { 21506 break 21507 } 21508 if y != v_0_1_0_1.Args[0] { 21509 break 21510 } 21511 v_1 := v.Args[1] 21512 if v_1.Op != OpARM64SLL { 21513 break 21514 } 21515 _ = v_1.Args[1] 21516 if x != v_1.Args[0] { 21517 break 21518 } 21519 v_1_1 := v_1.Args[1] 21520 if v_1_1.Op != OpARM64ANDconst { 21521 break 21522 } 21523 if v_1_1.Type != t { 21524 break 21525 } 21526 if v_1_1.AuxInt != 31 { 21527 break 21528 } 21529 if y != v_1_1.Args[0] { 21530 break 21531 } 21532 if !(cc.(Op) == OpARM64LessThanU) { 21533 break 21534 } 21535 v.reset(OpARM64RORW) 21536 v.AddArg(x) 21537 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 21538 v0.AddArg(y) 21539 v.AddArg(v0) 21540 return true 21541 } 21542 // match: (OR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 21543 // cond: cc.(Op) == OpARM64LessThanU 21544 // result: (RORW x y) 21545 for { 21546 _ = v.Args[1] 21547 v_0 := v.Args[0] 21548 if v_0.Op != OpARM64SRL { 21549 break 21550 } 21551 if v_0.Type != typ.UInt32 { 21552 break 21553 } 21554 _ = 
v_0.Args[1] 21555 v_0_0 := v_0.Args[0] 21556 if v_0_0.Op != OpARM64MOVWUreg { 21557 break 21558 } 21559 x := v_0_0.Args[0] 21560 v_0_1 := v_0.Args[1] 21561 if v_0_1.Op != OpARM64ANDconst { 21562 break 21563 } 21564 t := v_0_1.Type 21565 if v_0_1.AuxInt != 31 { 21566 break 21567 } 21568 y := v_0_1.Args[0] 21569 v_1 := v.Args[1] 21570 if v_1.Op != OpARM64CSEL0 { 21571 break 21572 } 21573 if v_1.Type != typ.UInt32 { 21574 break 21575 } 21576 cc := v_1.Aux 21577 _ = v_1.Args[1] 21578 v_1_0 := v_1.Args[0] 21579 if v_1_0.Op != OpARM64SLL { 21580 break 21581 } 21582 _ = v_1_0.Args[1] 21583 if x != v_1_0.Args[0] { 21584 break 21585 } 21586 v_1_0_1 := v_1_0.Args[1] 21587 if v_1_0_1.Op != OpARM64SUB { 21588 break 21589 } 21590 if v_1_0_1.Type != t { 21591 break 21592 } 21593 _ = v_1_0_1.Args[1] 21594 v_1_0_1_0 := v_1_0_1.Args[0] 21595 if v_1_0_1_0.Op != OpARM64MOVDconst { 21596 break 21597 } 21598 if v_1_0_1_0.AuxInt != 32 { 21599 break 21600 } 21601 v_1_0_1_1 := v_1_0_1.Args[1] 21602 if v_1_0_1_1.Op != OpARM64ANDconst { 21603 break 21604 } 21605 if v_1_0_1_1.Type != t { 21606 break 21607 } 21608 if v_1_0_1_1.AuxInt != 31 { 21609 break 21610 } 21611 if y != v_1_0_1_1.Args[0] { 21612 break 21613 } 21614 v_1_1 := v_1.Args[1] 21615 if v_1_1.Op != OpARM64CMPconst { 21616 break 21617 } 21618 if v_1_1.AuxInt != 64 { 21619 break 21620 } 21621 v_1_1_0 := v_1_1.Args[0] 21622 if v_1_1_0.Op != OpARM64SUB { 21623 break 21624 } 21625 if v_1_1_0.Type != t { 21626 break 21627 } 21628 _ = v_1_1_0.Args[1] 21629 v_1_1_0_0 := v_1_1_0.Args[0] 21630 if v_1_1_0_0.Op != OpARM64MOVDconst { 21631 break 21632 } 21633 if v_1_1_0_0.AuxInt != 32 { 21634 break 21635 } 21636 v_1_1_0_1 := v_1_1_0.Args[1] 21637 if v_1_1_0_1.Op != OpARM64ANDconst { 21638 break 21639 } 21640 if v_1_1_0_1.Type != t { 21641 break 21642 } 21643 if v_1_1_0_1.AuxInt != 31 { 21644 break 21645 } 21646 if y != v_1_1_0_1.Args[0] { 21647 break 21648 } 21649 if !(cc.(Op) == OpARM64LessThanU) { 21650 break 21651 } 21652 
v.reset(OpARM64RORW) 21653 v.AddArg(x) 21654 v.AddArg(y) 21655 return true 21656 } 21657 // match: (OR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 21658 // cond: cc.(Op) == OpARM64LessThanU 21659 // result: (RORW x y) 21660 for { 21661 _ = v.Args[1] 21662 v_0 := v.Args[0] 21663 if v_0.Op != OpARM64CSEL0 { 21664 break 21665 } 21666 if v_0.Type != typ.UInt32 { 21667 break 21668 } 21669 cc := v_0.Aux 21670 _ = v_0.Args[1] 21671 v_0_0 := v_0.Args[0] 21672 if v_0_0.Op != OpARM64SLL { 21673 break 21674 } 21675 _ = v_0_0.Args[1] 21676 x := v_0_0.Args[0] 21677 v_0_0_1 := v_0_0.Args[1] 21678 if v_0_0_1.Op != OpARM64SUB { 21679 break 21680 } 21681 t := v_0_0_1.Type 21682 _ = v_0_0_1.Args[1] 21683 v_0_0_1_0 := v_0_0_1.Args[0] 21684 if v_0_0_1_0.Op != OpARM64MOVDconst { 21685 break 21686 } 21687 if v_0_0_1_0.AuxInt != 32 { 21688 break 21689 } 21690 v_0_0_1_1 := v_0_0_1.Args[1] 21691 if v_0_0_1_1.Op != OpARM64ANDconst { 21692 break 21693 } 21694 if v_0_0_1_1.Type != t { 21695 break 21696 } 21697 if v_0_0_1_1.AuxInt != 31 { 21698 break 21699 } 21700 y := v_0_0_1_1.Args[0] 21701 v_0_1 := v_0.Args[1] 21702 if v_0_1.Op != OpARM64CMPconst { 21703 break 21704 } 21705 if v_0_1.AuxInt != 64 { 21706 break 21707 } 21708 v_0_1_0 := v_0_1.Args[0] 21709 if v_0_1_0.Op != OpARM64SUB { 21710 break 21711 } 21712 if v_0_1_0.Type != t { 21713 break 21714 } 21715 _ = v_0_1_0.Args[1] 21716 v_0_1_0_0 := v_0_1_0.Args[0] 21717 if v_0_1_0_0.Op != OpARM64MOVDconst { 21718 break 21719 } 21720 if v_0_1_0_0.AuxInt != 32 { 21721 break 21722 } 21723 v_0_1_0_1 := v_0_1_0.Args[1] 21724 if v_0_1_0_1.Op != OpARM64ANDconst { 21725 break 21726 } 21727 if v_0_1_0_1.Type != t { 21728 break 21729 } 21730 if v_0_1_0_1.AuxInt != 31 { 21731 break 21732 } 21733 if y != v_0_1_0_1.Args[0] { 21734 break 21735 } 21736 v_1 := v.Args[1] 21737 if v_1.Op != OpARM64SRL { 
21738 break 21739 } 21740 if v_1.Type != typ.UInt32 { 21741 break 21742 } 21743 _ = v_1.Args[1] 21744 v_1_0 := v_1.Args[0] 21745 if v_1_0.Op != OpARM64MOVWUreg { 21746 break 21747 } 21748 if x != v_1_0.Args[0] { 21749 break 21750 } 21751 v_1_1 := v_1.Args[1] 21752 if v_1_1.Op != OpARM64ANDconst { 21753 break 21754 } 21755 if v_1_1.Type != t { 21756 break 21757 } 21758 if v_1_1.AuxInt != 31 { 21759 break 21760 } 21761 if y != v_1_1.Args[0] { 21762 break 21763 } 21764 if !(cc.(Op) == OpARM64LessThanU) { 21765 break 21766 } 21767 v.reset(OpARM64RORW) 21768 v.AddArg(x) 21769 v.AddArg(y) 21770 return true 21771 } 21772 // match: (OR (UBFIZ [bfc] x) (ANDconst [ac] y)) 21773 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 21774 // result: (BFI [bfc] y x) 21775 for { 21776 _ = v.Args[1] 21777 v_0 := v.Args[0] 21778 if v_0.Op != OpARM64UBFIZ { 21779 break 21780 } 21781 bfc := v_0.AuxInt 21782 x := v_0.Args[0] 21783 v_1 := v.Args[1] 21784 if v_1.Op != OpARM64ANDconst { 21785 break 21786 } 21787 ac := v_1.AuxInt 21788 y := v_1.Args[0] 21789 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 21790 break 21791 } 21792 v.reset(OpARM64BFI) 21793 v.AuxInt = bfc 21794 v.AddArg(y) 21795 v.AddArg(x) 21796 return true 21797 } 21798 return false 21799 } 21800 func rewriteValueARM64_OpARM64OR_20(v *Value) bool { 21801 b := v.Block 21802 _ = b 21803 // match: (OR (ANDconst [ac] y) (UBFIZ [bfc] x)) 21804 // cond: ac == ^((1<<uint(getARM64BFwidth(bfc))-1) << uint(getARM64BFlsb(bfc))) 21805 // result: (BFI [bfc] y x) 21806 for { 21807 _ = v.Args[1] 21808 v_0 := v.Args[0] 21809 if v_0.Op != OpARM64ANDconst { 21810 break 21811 } 21812 ac := v_0.AuxInt 21813 y := v_0.Args[0] 21814 v_1 := v.Args[1] 21815 if v_1.Op != OpARM64UBFIZ { 21816 break 21817 } 21818 bfc := v_1.AuxInt 21819 x := v_1.Args[0] 21820 if !(ac == ^((1<<uint(getARM64BFwidth(bfc)) - 1) << uint(getARM64BFlsb(bfc)))) { 21821 break 21822 } 21823 v.reset(OpARM64BFI) 21824 
v.AuxInt = bfc 21825 v.AddArg(y) 21826 v.AddArg(x) 21827 return true 21828 } 21829 // match: (OR (UBFX [bfc] x) (ANDconst [ac] y)) 21830 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 21831 // result: (BFXIL [bfc] y x) 21832 for { 21833 _ = v.Args[1] 21834 v_0 := v.Args[0] 21835 if v_0.Op != OpARM64UBFX { 21836 break 21837 } 21838 bfc := v_0.AuxInt 21839 x := v_0.Args[0] 21840 v_1 := v.Args[1] 21841 if v_1.Op != OpARM64ANDconst { 21842 break 21843 } 21844 ac := v_1.AuxInt 21845 y := v_1.Args[0] 21846 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 21847 break 21848 } 21849 v.reset(OpARM64BFXIL) 21850 v.AuxInt = bfc 21851 v.AddArg(y) 21852 v.AddArg(x) 21853 return true 21854 } 21855 // match: (OR (ANDconst [ac] y) (UBFX [bfc] x)) 21856 // cond: ac == ^(1<<uint(getARM64BFwidth(bfc))-1) 21857 // result: (BFXIL [bfc] y x) 21858 for { 21859 _ = v.Args[1] 21860 v_0 := v.Args[0] 21861 if v_0.Op != OpARM64ANDconst { 21862 break 21863 } 21864 ac := v_0.AuxInt 21865 y := v_0.Args[0] 21866 v_1 := v.Args[1] 21867 if v_1.Op != OpARM64UBFX { 21868 break 21869 } 21870 bfc := v_1.AuxInt 21871 x := v_1.Args[0] 21872 if !(ac == ^(1<<uint(getARM64BFwidth(bfc)) - 1)) { 21873 break 21874 } 21875 v.reset(OpARM64BFXIL) 21876 v.AuxInt = bfc 21877 v.AddArg(y) 21878 v.AddArg(x) 21879 return true 21880 } 21881 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem))) 21882 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 21883 // 
result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 21884 for { 21885 t := v.Type 21886 _ = v.Args[1] 21887 o0 := v.Args[0] 21888 if o0.Op != OpARM64ORshiftLL { 21889 break 21890 } 21891 if o0.AuxInt != 8 { 21892 break 21893 } 21894 _ = o0.Args[1] 21895 o1 := o0.Args[0] 21896 if o1.Op != OpARM64ORshiftLL { 21897 break 21898 } 21899 if o1.AuxInt != 16 { 21900 break 21901 } 21902 _ = o1.Args[1] 21903 s0 := o1.Args[0] 21904 if s0.Op != OpARM64SLLconst { 21905 break 21906 } 21907 if s0.AuxInt != 24 { 21908 break 21909 } 21910 y0 := s0.Args[0] 21911 if y0.Op != OpARM64MOVDnop { 21912 break 21913 } 21914 x0 := y0.Args[0] 21915 if x0.Op != OpARM64MOVBUload { 21916 break 21917 } 21918 i3 := x0.AuxInt 21919 s := x0.Aux 21920 _ = x0.Args[1] 21921 p := x0.Args[0] 21922 mem := x0.Args[1] 21923 y1 := o1.Args[1] 21924 if y1.Op != OpARM64MOVDnop { 21925 break 21926 } 21927 x1 := y1.Args[0] 21928 if x1.Op != OpARM64MOVBUload { 21929 break 21930 } 21931 i2 := x1.AuxInt 21932 if x1.Aux != s { 21933 break 21934 } 21935 _ = x1.Args[1] 21936 if p != x1.Args[0] { 21937 break 21938 } 21939 if mem != x1.Args[1] { 21940 break 21941 } 21942 y2 := o0.Args[1] 21943 if y2.Op != OpARM64MOVDnop { 21944 break 21945 } 21946 x2 := y2.Args[0] 21947 if x2.Op != OpARM64MOVBUload { 21948 break 21949 } 21950 i1 := x2.AuxInt 21951 if x2.Aux != s { 21952 break 21953 } 21954 _ = x2.Args[1] 21955 if p != x2.Args[0] { 21956 break 21957 } 21958 if mem != x2.Args[1] { 21959 break 21960 } 21961 y3 := v.Args[1] 21962 if y3.Op != OpARM64MOVDnop { 21963 break 21964 } 21965 x3 := y3.Args[0] 21966 if x3.Op != OpARM64MOVBUload { 21967 break 21968 } 21969 i0 := x3.AuxInt 21970 if x3.Aux != s { 21971 break 21972 } 21973 _ = x3.Args[1] 21974 if p != x3.Args[0] { 21975 break 21976 } 21977 if mem != x3.Args[1] { 21978 break 21979 } 21980 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 
&& y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 21981 break 21982 } 21983 b = mergePoint(b, x0, x1, x2, x3) 21984 v0 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t) 21985 v.reset(OpCopy) 21986 v.AddArg(v0) 21987 v0.Aux = s 21988 v1 := b.NewValue0(x3.Pos, OpOffPtr, p.Type) 21989 v1.AuxInt = i0 21990 v1.AddArg(p) 21991 v0.AddArg(v1) 21992 v0.AddArg(mem) 21993 return true 21994 } 21995 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i1] {s} p mem)))) 21996 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 21997 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 21998 for { 21999 t := v.Type 22000 _ = v.Args[1] 22001 y3 := v.Args[0] 22002 if y3.Op != OpARM64MOVDnop { 22003 break 22004 } 22005 x3 := y3.Args[0] 22006 if x3.Op != OpARM64MOVBUload { 22007 break 22008 } 22009 i0 := x3.AuxInt 22010 s := x3.Aux 22011 _ = x3.Args[1] 22012 p := x3.Args[0] 22013 mem := x3.Args[1] 22014 o0 := v.Args[1] 22015 if o0.Op != OpARM64ORshiftLL { 22016 break 22017 } 22018 if o0.AuxInt != 8 { 22019 break 22020 } 22021 _ = o0.Args[1] 22022 o1 := o0.Args[0] 22023 if o1.Op != OpARM64ORshiftLL { 22024 break 22025 } 22026 if o1.AuxInt != 16 { 22027 break 22028 } 22029 _ = o1.Args[1] 22030 s0 := 
o1.Args[0] 22031 if s0.Op != OpARM64SLLconst { 22032 break 22033 } 22034 if s0.AuxInt != 24 { 22035 break 22036 } 22037 y0 := s0.Args[0] 22038 if y0.Op != OpARM64MOVDnop { 22039 break 22040 } 22041 x0 := y0.Args[0] 22042 if x0.Op != OpARM64MOVBUload { 22043 break 22044 } 22045 i3 := x0.AuxInt 22046 if x0.Aux != s { 22047 break 22048 } 22049 _ = x0.Args[1] 22050 if p != x0.Args[0] { 22051 break 22052 } 22053 if mem != x0.Args[1] { 22054 break 22055 } 22056 y1 := o1.Args[1] 22057 if y1.Op != OpARM64MOVDnop { 22058 break 22059 } 22060 x1 := y1.Args[0] 22061 if x1.Op != OpARM64MOVBUload { 22062 break 22063 } 22064 i2 := x1.AuxInt 22065 if x1.Aux != s { 22066 break 22067 } 22068 _ = x1.Args[1] 22069 if p != x1.Args[0] { 22070 break 22071 } 22072 if mem != x1.Args[1] { 22073 break 22074 } 22075 y2 := o0.Args[1] 22076 if y2.Op != OpARM64MOVDnop { 22077 break 22078 } 22079 x2 := y2.Args[0] 22080 if x2.Op != OpARM64MOVBUload { 22081 break 22082 } 22083 i1 := x2.AuxInt 22084 if x2.Aux != s { 22085 break 22086 } 22087 _ = x2.Args[1] 22088 if p != x2.Args[0] { 22089 break 22090 } 22091 if mem != x2.Args[1] { 22092 break 22093 } 22094 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22095 break 22096 } 22097 b = mergePoint(b, x0, x1, x2, x3) 22098 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 22099 v.reset(OpCopy) 22100 v.AddArg(v0) 22101 v0.Aux = s 22102 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 22103 v1.AuxInt = i0 22104 v1.AddArg(p) 22105 v0.AddArg(v1) 22106 v0.AddArg(mem) 22107 return true 22108 } 22109 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop 
x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem))) 22110 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22111 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 22112 for { 22113 t := v.Type 22114 _ = v.Args[1] 22115 o0 := v.Args[0] 22116 if o0.Op != OpARM64ORshiftLL { 22117 break 22118 } 22119 if o0.AuxInt != 8 { 22120 break 22121 } 22122 _ = o0.Args[1] 22123 o1 := o0.Args[0] 22124 if o1.Op != OpARM64ORshiftLL { 22125 break 22126 } 22127 if o1.AuxInt != 16 { 22128 break 22129 } 22130 _ = o1.Args[1] 22131 s0 := o1.Args[0] 22132 if s0.Op != OpARM64SLLconst { 22133 break 22134 } 22135 if s0.AuxInt != 24 { 22136 break 22137 } 22138 y0 := s0.Args[0] 22139 if y0.Op != OpARM64MOVDnop { 22140 break 22141 } 22142 x0 := y0.Args[0] 22143 if x0.Op != OpARM64MOVBUload { 22144 break 22145 } 22146 if x0.AuxInt != 3 { 22147 break 22148 } 22149 s := x0.Aux 22150 _ = x0.Args[1] 22151 p := x0.Args[0] 22152 mem := x0.Args[1] 22153 y1 := o1.Args[1] 22154 if y1.Op != OpARM64MOVDnop { 22155 break 22156 } 22157 x1 := y1.Args[0] 22158 if x1.Op != OpARM64MOVBUload { 22159 break 22160 } 22161 if x1.AuxInt != 2 { 22162 break 22163 } 22164 if x1.Aux != s { 22165 break 22166 } 22167 _ = x1.Args[1] 22168 if p != x1.Args[0] { 22169 break 22170 } 22171 if mem != x1.Args[1] { 22172 break 22173 } 22174 y2 := o0.Args[1] 22175 if y2.Op != OpARM64MOVDnop { 22176 break 22177 } 22178 x2 := 
y2.Args[0] 22179 if x2.Op != OpARM64MOVBUload { 22180 break 22181 } 22182 if x2.AuxInt != 1 { 22183 break 22184 } 22185 if x2.Aux != s { 22186 break 22187 } 22188 _ = x2.Args[1] 22189 p1 := x2.Args[0] 22190 if p1.Op != OpARM64ADD { 22191 break 22192 } 22193 _ = p1.Args[1] 22194 ptr1 := p1.Args[0] 22195 idx1 := p1.Args[1] 22196 if mem != x2.Args[1] { 22197 break 22198 } 22199 y3 := v.Args[1] 22200 if y3.Op != OpARM64MOVDnop { 22201 break 22202 } 22203 x3 := y3.Args[0] 22204 if x3.Op != OpARM64MOVBUloadidx { 22205 break 22206 } 22207 _ = x3.Args[2] 22208 ptr0 := x3.Args[0] 22209 idx0 := x3.Args[1] 22210 if mem != x3.Args[2] { 22211 break 22212 } 22213 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22214 break 22215 } 22216 b = mergePoint(b, x0, x1, x2, x3) 22217 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 22218 v.reset(OpCopy) 22219 v.AddArg(v0) 22220 v0.AddArg(ptr0) 22221 v0.AddArg(idx0) 22222 v0.AddArg(mem) 22223 return true 22224 } 22225 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [3] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 22226 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || 
isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22227 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr0 idx0 mem) 22228 for { 22229 t := v.Type 22230 _ = v.Args[1] 22231 y3 := v.Args[0] 22232 if y3.Op != OpARM64MOVDnop { 22233 break 22234 } 22235 x3 := y3.Args[0] 22236 if x3.Op != OpARM64MOVBUloadidx { 22237 break 22238 } 22239 _ = x3.Args[2] 22240 ptr0 := x3.Args[0] 22241 idx0 := x3.Args[1] 22242 mem := x3.Args[2] 22243 o0 := v.Args[1] 22244 if o0.Op != OpARM64ORshiftLL { 22245 break 22246 } 22247 if o0.AuxInt != 8 { 22248 break 22249 } 22250 _ = o0.Args[1] 22251 o1 := o0.Args[0] 22252 if o1.Op != OpARM64ORshiftLL { 22253 break 22254 } 22255 if o1.AuxInt != 16 { 22256 break 22257 } 22258 _ = o1.Args[1] 22259 s0 := o1.Args[0] 22260 if s0.Op != OpARM64SLLconst { 22261 break 22262 } 22263 if s0.AuxInt != 24 { 22264 break 22265 } 22266 y0 := s0.Args[0] 22267 if y0.Op != OpARM64MOVDnop { 22268 break 22269 } 22270 x0 := y0.Args[0] 22271 if x0.Op != OpARM64MOVBUload { 22272 break 22273 } 22274 if x0.AuxInt != 3 { 22275 break 22276 } 22277 s := x0.Aux 22278 _ = x0.Args[1] 22279 p := x0.Args[0] 22280 if mem != x0.Args[1] { 22281 break 22282 } 22283 y1 := o1.Args[1] 22284 if y1.Op != OpARM64MOVDnop { 22285 break 22286 } 22287 x1 := y1.Args[0] 22288 if x1.Op != OpARM64MOVBUload { 22289 break 22290 } 22291 if x1.AuxInt != 2 { 22292 break 22293 } 22294 if x1.Aux != s { 22295 break 22296 } 22297 _ = x1.Args[1] 22298 if p != x1.Args[0] { 22299 break 22300 } 22301 if mem != x1.Args[1] { 22302 break 22303 } 22304 y2 := o0.Args[1] 22305 if y2.Op != OpARM64MOVDnop { 22306 break 22307 } 22308 x2 := y2.Args[0] 22309 if x2.Op != OpARM64MOVBUload { 22310 break 22311 } 22312 if x2.AuxInt != 1 { 22313 break 22314 } 22315 if x2.Aux != s { 22316 break 22317 } 22318 _ = x2.Args[1] 22319 p1 := 
x2.Args[0] 22320 if p1.Op != OpARM64ADD { 22321 break 22322 } 22323 _ = p1.Args[1] 22324 ptr1 := p1.Args[0] 22325 idx1 := p1.Args[1] 22326 if mem != x2.Args[1] { 22327 break 22328 } 22329 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22330 break 22331 } 22332 b = mergePoint(b, x0, x1, x2, x3) 22333 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 22334 v.reset(OpCopy) 22335 v.AddArg(v0) 22336 v0.AddArg(ptr0) 22337 v0.AddArg(idx0) 22338 v0.AddArg(mem) 22339 return true 22340 } 22341 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem))) 22342 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22343 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 22344 for { 22345 t := v.Type 22346 _ = v.Args[1] 22347 o0 := v.Args[0] 22348 if o0.Op != OpARM64ORshiftLL { 22349 break 22350 } 22351 if o0.AuxInt != 8 { 22352 break 22353 } 22354 _ = o0.Args[1] 22355 o1 := o0.Args[0] 22356 if o1.Op != OpARM64ORshiftLL { 22357 break 22358 } 
22359 if o1.AuxInt != 16 { 22360 break 22361 } 22362 _ = o1.Args[1] 22363 s0 := o1.Args[0] 22364 if s0.Op != OpARM64SLLconst { 22365 break 22366 } 22367 if s0.AuxInt != 24 { 22368 break 22369 } 22370 y0 := s0.Args[0] 22371 if y0.Op != OpARM64MOVDnop { 22372 break 22373 } 22374 x0 := y0.Args[0] 22375 if x0.Op != OpARM64MOVBUloadidx { 22376 break 22377 } 22378 _ = x0.Args[2] 22379 ptr := x0.Args[0] 22380 x0_1 := x0.Args[1] 22381 if x0_1.Op != OpARM64ADDconst { 22382 break 22383 } 22384 if x0_1.AuxInt != 3 { 22385 break 22386 } 22387 idx := x0_1.Args[0] 22388 mem := x0.Args[2] 22389 y1 := o1.Args[1] 22390 if y1.Op != OpARM64MOVDnop { 22391 break 22392 } 22393 x1 := y1.Args[0] 22394 if x1.Op != OpARM64MOVBUloadidx { 22395 break 22396 } 22397 _ = x1.Args[2] 22398 if ptr != x1.Args[0] { 22399 break 22400 } 22401 x1_1 := x1.Args[1] 22402 if x1_1.Op != OpARM64ADDconst { 22403 break 22404 } 22405 if x1_1.AuxInt != 2 { 22406 break 22407 } 22408 if idx != x1_1.Args[0] { 22409 break 22410 } 22411 if mem != x1.Args[2] { 22412 break 22413 } 22414 y2 := o0.Args[1] 22415 if y2.Op != OpARM64MOVDnop { 22416 break 22417 } 22418 x2 := y2.Args[0] 22419 if x2.Op != OpARM64MOVBUloadidx { 22420 break 22421 } 22422 _ = x2.Args[2] 22423 if ptr != x2.Args[0] { 22424 break 22425 } 22426 x2_1 := x2.Args[1] 22427 if x2_1.Op != OpARM64ADDconst { 22428 break 22429 } 22430 if x2_1.AuxInt != 1 { 22431 break 22432 } 22433 if idx != x2_1.Args[0] { 22434 break 22435 } 22436 if mem != x2.Args[2] { 22437 break 22438 } 22439 y3 := v.Args[1] 22440 if y3.Op != OpARM64MOVDnop { 22441 break 22442 } 22443 x3 := y3.Args[0] 22444 if x3.Op != OpARM64MOVBUloadidx { 22445 break 22446 } 22447 _ = x3.Args[2] 22448 if ptr != x3.Args[0] { 22449 break 22450 } 22451 if idx != x3.Args[1] { 22452 break 22453 } 22454 if mem != x3.Args[2] { 22455 break 22456 } 22457 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 
1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22458 break 22459 } 22460 b = mergePoint(b, x0, x1, x2, x3) 22461 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22462 v.reset(OpCopy) 22463 v.AddArg(v0) 22464 v0.AddArg(ptr) 22465 v0.AddArg(idx) 22466 v0.AddArg(mem) 22467 return true 22468 } 22469 // match: (OR <t> y3:(MOVDnop x3:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 22470 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 22471 // result: @mergePoint(b,x0,x1,x2,x3) (MOVWUloadidx <t> ptr idx mem) 22472 for { 22473 t := v.Type 22474 _ = v.Args[1] 22475 y3 := v.Args[0] 22476 if y3.Op != OpARM64MOVDnop { 22477 break 22478 } 22479 x3 := y3.Args[0] 22480 if x3.Op != OpARM64MOVBUloadidx { 22481 break 22482 } 22483 _ = x3.Args[2] 22484 ptr := x3.Args[0] 22485 idx := x3.Args[1] 22486 mem := x3.Args[2] 22487 o0 := v.Args[1] 22488 if o0.Op != OpARM64ORshiftLL { 22489 break 22490 } 22491 if o0.AuxInt != 8 { 22492 break 22493 } 22494 _ = o0.Args[1] 22495 o1 := o0.Args[0] 22496 if o1.Op != OpARM64ORshiftLL { 22497 break 22498 } 22499 if o1.AuxInt != 16 { 22500 break 22501 } 22502 _ = o1.Args[1] 22503 s0 := o1.Args[0] 22504 if s0.Op != OpARM64SLLconst { 22505 break 22506 } 22507 if s0.AuxInt != 24 { 22508 break 22509 } 22510 y0 := s0.Args[0] 22511 if y0.Op != 
OpARM64MOVDnop { 22512 break 22513 } 22514 x0 := y0.Args[0] 22515 if x0.Op != OpARM64MOVBUloadidx { 22516 break 22517 } 22518 _ = x0.Args[2] 22519 if ptr != x0.Args[0] { 22520 break 22521 } 22522 x0_1 := x0.Args[1] 22523 if x0_1.Op != OpARM64ADDconst { 22524 break 22525 } 22526 if x0_1.AuxInt != 3 { 22527 break 22528 } 22529 if idx != x0_1.Args[0] { 22530 break 22531 } 22532 if mem != x0.Args[2] { 22533 break 22534 } 22535 y1 := o1.Args[1] 22536 if y1.Op != OpARM64MOVDnop { 22537 break 22538 } 22539 x1 := y1.Args[0] 22540 if x1.Op != OpARM64MOVBUloadidx { 22541 break 22542 } 22543 _ = x1.Args[2] 22544 if ptr != x1.Args[0] { 22545 break 22546 } 22547 x1_1 := x1.Args[1] 22548 if x1_1.Op != OpARM64ADDconst { 22549 break 22550 } 22551 if x1_1.AuxInt != 2 { 22552 break 22553 } 22554 if idx != x1_1.Args[0] { 22555 break 22556 } 22557 if mem != x1.Args[2] { 22558 break 22559 } 22560 y2 := o0.Args[1] 22561 if y2.Op != OpARM64MOVDnop { 22562 break 22563 } 22564 x2 := y2.Args[0] 22565 if x2.Op != OpARM64MOVBUloadidx { 22566 break 22567 } 22568 _ = x2.Args[2] 22569 if ptr != x2.Args[0] { 22570 break 22571 } 22572 x2_1 := x2.Args[1] 22573 if x2_1.Op != OpARM64ADDconst { 22574 break 22575 } 22576 if x2_1.AuxInt != 1 { 22577 break 22578 } 22579 if idx != x2_1.Args[0] { 22580 break 22581 } 22582 if mem != x2.Args[2] { 22583 break 22584 } 22585 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 22586 break 22587 } 22588 b = mergePoint(b, x0, x1, x2, x3) 22589 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 22590 v.reset(OpCopy) 22591 v.AddArg(v0) 22592 v0.AddArg(ptr) 22593 v0.AddArg(idx) 22594 v0.AddArg(mem) 22595 return true 22596 } 22597 // match: 
(OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem))) 22598 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 22599 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 22600 for { 22601 t := v.Type 22602 _ = v.Args[1] 22603 o0 := v.Args[0] 22604 if o0.Op != OpARM64ORshiftLL { 22605 break 22606 } 22607 if o0.AuxInt != 8 { 22608 break 22609 } 22610 _ = o0.Args[1] 22611 o1 := o0.Args[0] 22612 if o1.Op != OpARM64ORshiftLL { 22613 break 22614 } 22615 if o1.AuxInt != 16 { 22616 break 22617 } 22618 _ = o1.Args[1] 22619 o2 := o1.Args[0] 22620 if o2.Op != OpARM64ORshiftLL { 22621 break 22622 } 22623 if o2.AuxInt != 24 { 22624 break 22625 } 22626 _ = o2.Args[1] 22627 o3 := o2.Args[0] 22628 if o3.Op != 
OpARM64ORshiftLL { 22629 break 22630 } 22631 if o3.AuxInt != 32 { 22632 break 22633 } 22634 _ = o3.Args[1] 22635 o4 := o3.Args[0] 22636 if o4.Op != OpARM64ORshiftLL { 22637 break 22638 } 22639 if o4.AuxInt != 40 { 22640 break 22641 } 22642 _ = o4.Args[1] 22643 o5 := o4.Args[0] 22644 if o5.Op != OpARM64ORshiftLL { 22645 break 22646 } 22647 if o5.AuxInt != 48 { 22648 break 22649 } 22650 _ = o5.Args[1] 22651 s0 := o5.Args[0] 22652 if s0.Op != OpARM64SLLconst { 22653 break 22654 } 22655 if s0.AuxInt != 56 { 22656 break 22657 } 22658 y0 := s0.Args[0] 22659 if y0.Op != OpARM64MOVDnop { 22660 break 22661 } 22662 x0 := y0.Args[0] 22663 if x0.Op != OpARM64MOVBUload { 22664 break 22665 } 22666 i7 := x0.AuxInt 22667 s := x0.Aux 22668 _ = x0.Args[1] 22669 p := x0.Args[0] 22670 mem := x0.Args[1] 22671 y1 := o5.Args[1] 22672 if y1.Op != OpARM64MOVDnop { 22673 break 22674 } 22675 x1 := y1.Args[0] 22676 if x1.Op != OpARM64MOVBUload { 22677 break 22678 } 22679 i6 := x1.AuxInt 22680 if x1.Aux != s { 22681 break 22682 } 22683 _ = x1.Args[1] 22684 if p != x1.Args[0] { 22685 break 22686 } 22687 if mem != x1.Args[1] { 22688 break 22689 } 22690 y2 := o4.Args[1] 22691 if y2.Op != OpARM64MOVDnop { 22692 break 22693 } 22694 x2 := y2.Args[0] 22695 if x2.Op != OpARM64MOVBUload { 22696 break 22697 } 22698 i5 := x2.AuxInt 22699 if x2.Aux != s { 22700 break 22701 } 22702 _ = x2.Args[1] 22703 if p != x2.Args[0] { 22704 break 22705 } 22706 if mem != x2.Args[1] { 22707 break 22708 } 22709 y3 := o3.Args[1] 22710 if y3.Op != OpARM64MOVDnop { 22711 break 22712 } 22713 x3 := y3.Args[0] 22714 if x3.Op != OpARM64MOVBUload { 22715 break 22716 } 22717 i4 := x3.AuxInt 22718 if x3.Aux != s { 22719 break 22720 } 22721 _ = x3.Args[1] 22722 if p != x3.Args[0] { 22723 break 22724 } 22725 if mem != x3.Args[1] { 22726 break 22727 } 22728 y4 := o2.Args[1] 22729 if y4.Op != OpARM64MOVDnop { 22730 break 22731 } 22732 x4 := y4.Args[0] 22733 if x4.Op != OpARM64MOVBUload { 22734 break 22735 } 22736 i3 := x4.AuxInt 22737 
if x4.Aux != s { 22738 break 22739 } 22740 _ = x4.Args[1] 22741 if p != x4.Args[0] { 22742 break 22743 } 22744 if mem != x4.Args[1] { 22745 break 22746 } 22747 y5 := o1.Args[1] 22748 if y5.Op != OpARM64MOVDnop { 22749 break 22750 } 22751 x5 := y5.Args[0] 22752 if x5.Op != OpARM64MOVBUload { 22753 break 22754 } 22755 i2 := x5.AuxInt 22756 if x5.Aux != s { 22757 break 22758 } 22759 _ = x5.Args[1] 22760 if p != x5.Args[0] { 22761 break 22762 } 22763 if mem != x5.Args[1] { 22764 break 22765 } 22766 y6 := o0.Args[1] 22767 if y6.Op != OpARM64MOVDnop { 22768 break 22769 } 22770 x6 := y6.Args[0] 22771 if x6.Op != OpARM64MOVBUload { 22772 break 22773 } 22774 i1 := x6.AuxInt 22775 if x6.Aux != s { 22776 break 22777 } 22778 _ = x6.Args[1] 22779 if p != x6.Args[0] { 22780 break 22781 } 22782 if mem != x6.Args[1] { 22783 break 22784 } 22785 y7 := v.Args[1] 22786 if y7.Op != OpARM64MOVDnop { 22787 break 22788 } 22789 x7 := y7.Args[0] 22790 if x7.Op != OpARM64MOVBUload { 22791 break 22792 } 22793 i0 := x7.AuxInt 22794 if x7.Aux != s { 22795 break 22796 } 22797 _ = x7.Args[1] 22798 if p != x7.Args[0] { 22799 break 22800 } 22801 if mem != x7.Args[1] { 22802 break 22803 } 22804 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && 
clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 22805 break 22806 } 22807 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 22808 v0 := b.NewValue0(x7.Pos, OpARM64MOVDload, t) 22809 v.reset(OpCopy) 22810 v.AddArg(v0) 22811 v0.Aux = s 22812 v1 := b.NewValue0(x7.Pos, OpOffPtr, p.Type) 22813 v1.AuxInt = i0 22814 v1.AddArg(p) 22815 v0.AddArg(v1) 22816 v0.AddArg(mem) 22817 return true 22818 } 22819 return false 22820 } 22821 func rewriteValueARM64_OpARM64OR_30(v *Value) bool { 22822 b := v.Block 22823 _ = b 22824 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i0] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i1] {s} p mem)))) 22825 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 22826 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) 
mem) 22827 for { 22828 t := v.Type 22829 _ = v.Args[1] 22830 y7 := v.Args[0] 22831 if y7.Op != OpARM64MOVDnop { 22832 break 22833 } 22834 x7 := y7.Args[0] 22835 if x7.Op != OpARM64MOVBUload { 22836 break 22837 } 22838 i0 := x7.AuxInt 22839 s := x7.Aux 22840 _ = x7.Args[1] 22841 p := x7.Args[0] 22842 mem := x7.Args[1] 22843 o0 := v.Args[1] 22844 if o0.Op != OpARM64ORshiftLL { 22845 break 22846 } 22847 if o0.AuxInt != 8 { 22848 break 22849 } 22850 _ = o0.Args[1] 22851 o1 := o0.Args[0] 22852 if o1.Op != OpARM64ORshiftLL { 22853 break 22854 } 22855 if o1.AuxInt != 16 { 22856 break 22857 } 22858 _ = o1.Args[1] 22859 o2 := o1.Args[0] 22860 if o2.Op != OpARM64ORshiftLL { 22861 break 22862 } 22863 if o2.AuxInt != 24 { 22864 break 22865 } 22866 _ = o2.Args[1] 22867 o3 := o2.Args[0] 22868 if o3.Op != OpARM64ORshiftLL { 22869 break 22870 } 22871 if o3.AuxInt != 32 { 22872 break 22873 } 22874 _ = o3.Args[1] 22875 o4 := o3.Args[0] 22876 if o4.Op != OpARM64ORshiftLL { 22877 break 22878 } 22879 if o4.AuxInt != 40 { 22880 break 22881 } 22882 _ = o4.Args[1] 22883 o5 := o4.Args[0] 22884 if o5.Op != OpARM64ORshiftLL { 22885 break 22886 } 22887 if o5.AuxInt != 48 { 22888 break 22889 } 22890 _ = o5.Args[1] 22891 s0 := o5.Args[0] 22892 if s0.Op != OpARM64SLLconst { 22893 break 22894 } 22895 if s0.AuxInt != 56 { 22896 break 22897 } 22898 y0 := s0.Args[0] 22899 if y0.Op != OpARM64MOVDnop { 22900 break 22901 } 22902 x0 := y0.Args[0] 22903 if x0.Op != OpARM64MOVBUload { 22904 break 22905 } 22906 i7 := x0.AuxInt 22907 if x0.Aux != s { 22908 break 22909 } 22910 _ = x0.Args[1] 22911 if p != x0.Args[0] { 22912 break 22913 } 22914 if mem != x0.Args[1] { 22915 break 22916 } 22917 y1 := o5.Args[1] 22918 if y1.Op != OpARM64MOVDnop { 22919 break 22920 } 22921 x1 := y1.Args[0] 22922 if x1.Op != OpARM64MOVBUload { 22923 break 22924 } 22925 i6 := x1.AuxInt 22926 if x1.Aux != s { 22927 break 22928 } 22929 _ = x1.Args[1] 22930 if p != x1.Args[0] { 22931 break 22932 } 22933 if mem != x1.Args[1] { 22934 
break 22935 } 22936 y2 := o4.Args[1] 22937 if y2.Op != OpARM64MOVDnop { 22938 break 22939 } 22940 x2 := y2.Args[0] 22941 if x2.Op != OpARM64MOVBUload { 22942 break 22943 } 22944 i5 := x2.AuxInt 22945 if x2.Aux != s { 22946 break 22947 } 22948 _ = x2.Args[1] 22949 if p != x2.Args[0] { 22950 break 22951 } 22952 if mem != x2.Args[1] { 22953 break 22954 } 22955 y3 := o3.Args[1] 22956 if y3.Op != OpARM64MOVDnop { 22957 break 22958 } 22959 x3 := y3.Args[0] 22960 if x3.Op != OpARM64MOVBUload { 22961 break 22962 } 22963 i4 := x3.AuxInt 22964 if x3.Aux != s { 22965 break 22966 } 22967 _ = x3.Args[1] 22968 if p != x3.Args[0] { 22969 break 22970 } 22971 if mem != x3.Args[1] { 22972 break 22973 } 22974 y4 := o2.Args[1] 22975 if y4.Op != OpARM64MOVDnop { 22976 break 22977 } 22978 x4 := y4.Args[0] 22979 if x4.Op != OpARM64MOVBUload { 22980 break 22981 } 22982 i3 := x4.AuxInt 22983 if x4.Aux != s { 22984 break 22985 } 22986 _ = x4.Args[1] 22987 if p != x4.Args[0] { 22988 break 22989 } 22990 if mem != x4.Args[1] { 22991 break 22992 } 22993 y5 := o1.Args[1] 22994 if y5.Op != OpARM64MOVDnop { 22995 break 22996 } 22997 x5 := y5.Args[0] 22998 if x5.Op != OpARM64MOVBUload { 22999 break 23000 } 23001 i2 := x5.AuxInt 23002 if x5.Aux != s { 23003 break 23004 } 23005 _ = x5.Args[1] 23006 if p != x5.Args[0] { 23007 break 23008 } 23009 if mem != x5.Args[1] { 23010 break 23011 } 23012 y6 := o0.Args[1] 23013 if y6.Op != OpARM64MOVDnop { 23014 break 23015 } 23016 x6 := y6.Args[0] 23017 if x6.Op != OpARM64MOVBUload { 23018 break 23019 } 23020 i1 := x6.AuxInt 23021 if x6.Aux != s { 23022 break 23023 } 23024 _ = x6.Args[1] 23025 if p != x6.Args[0] { 23026 break 23027 } 23028 if mem != x6.Args[1] { 23029 break 23030 } 23031 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && 
y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23032 break 23033 } 23034 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23035 v0 := b.NewValue0(x6.Pos, OpARM64MOVDload, t) 23036 v.reset(OpCopy) 23037 v.AddArg(v0) 23038 v0.Aux = s 23039 v1 := b.NewValue0(x6.Pos, OpOffPtr, p.Type) 23040 v1.AuxInt = i0 23041 v1.AddArg(p) 23042 v0.AddArg(v1) 23043 v0.AddArg(mem) 23044 return true 23045 } 23046 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem))) 23047 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && 
isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23048 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 23049 for { 23050 t := v.Type 23051 _ = v.Args[1] 23052 o0 := v.Args[0] 23053 if o0.Op != OpARM64ORshiftLL { 23054 break 23055 } 23056 if o0.AuxInt != 8 { 23057 break 23058 } 23059 _ = o0.Args[1] 23060 o1 := o0.Args[0] 23061 if o1.Op != OpARM64ORshiftLL { 23062 break 23063 } 23064 if o1.AuxInt != 16 { 23065 break 23066 } 23067 _ = o1.Args[1] 23068 o2 := o1.Args[0] 23069 if o2.Op != OpARM64ORshiftLL { 23070 break 23071 } 23072 if o2.AuxInt != 24 { 23073 break 23074 } 23075 _ = o2.Args[1] 23076 o3 := o2.Args[0] 23077 if o3.Op != OpARM64ORshiftLL { 23078 break 23079 } 23080 if o3.AuxInt != 32 { 23081 break 23082 } 23083 _ = o3.Args[1] 23084 o4 := o3.Args[0] 23085 if o4.Op != OpARM64ORshiftLL { 23086 break 23087 } 23088 if o4.AuxInt != 40 { 23089 break 23090 } 23091 _ = o4.Args[1] 23092 o5 := o4.Args[0] 23093 if o5.Op != OpARM64ORshiftLL { 23094 break 23095 } 23096 if o5.AuxInt != 48 { 23097 break 23098 } 23099 _ = o5.Args[1] 23100 s0 := o5.Args[0] 23101 if s0.Op != OpARM64SLLconst { 23102 break 23103 } 23104 if s0.AuxInt != 56 { 23105 break 23106 } 23107 y0 := s0.Args[0] 23108 if y0.Op != OpARM64MOVDnop { 23109 break 23110 } 23111 x0 := y0.Args[0] 23112 if x0.Op != OpARM64MOVBUload { 23113 break 23114 } 23115 if x0.AuxInt != 7 { 23116 break 23117 } 23118 s := x0.Aux 23119 _ = x0.Args[1] 23120 p := x0.Args[0] 23121 mem := x0.Args[1] 23122 y1 := o5.Args[1] 23123 if y1.Op != OpARM64MOVDnop { 23124 break 23125 } 23126 x1 := y1.Args[0] 23127 if x1.Op != OpARM64MOVBUload { 23128 break 23129 } 23130 if x1.AuxInt != 6 { 23131 break 
23132 } 23133 if x1.Aux != s { 23134 break 23135 } 23136 _ = x1.Args[1] 23137 if p != x1.Args[0] { 23138 break 23139 } 23140 if mem != x1.Args[1] { 23141 break 23142 } 23143 y2 := o4.Args[1] 23144 if y2.Op != OpARM64MOVDnop { 23145 break 23146 } 23147 x2 := y2.Args[0] 23148 if x2.Op != OpARM64MOVBUload { 23149 break 23150 } 23151 if x2.AuxInt != 5 { 23152 break 23153 } 23154 if x2.Aux != s { 23155 break 23156 } 23157 _ = x2.Args[1] 23158 if p != x2.Args[0] { 23159 break 23160 } 23161 if mem != x2.Args[1] { 23162 break 23163 } 23164 y3 := o3.Args[1] 23165 if y3.Op != OpARM64MOVDnop { 23166 break 23167 } 23168 x3 := y3.Args[0] 23169 if x3.Op != OpARM64MOVBUload { 23170 break 23171 } 23172 if x3.AuxInt != 4 { 23173 break 23174 } 23175 if x3.Aux != s { 23176 break 23177 } 23178 _ = x3.Args[1] 23179 if p != x3.Args[0] { 23180 break 23181 } 23182 if mem != x3.Args[1] { 23183 break 23184 } 23185 y4 := o2.Args[1] 23186 if y4.Op != OpARM64MOVDnop { 23187 break 23188 } 23189 x4 := y4.Args[0] 23190 if x4.Op != OpARM64MOVBUload { 23191 break 23192 } 23193 if x4.AuxInt != 3 { 23194 break 23195 } 23196 if x4.Aux != s { 23197 break 23198 } 23199 _ = x4.Args[1] 23200 if p != x4.Args[0] { 23201 break 23202 } 23203 if mem != x4.Args[1] { 23204 break 23205 } 23206 y5 := o1.Args[1] 23207 if y5.Op != OpARM64MOVDnop { 23208 break 23209 } 23210 x5 := y5.Args[0] 23211 if x5.Op != OpARM64MOVBUload { 23212 break 23213 } 23214 if x5.AuxInt != 2 { 23215 break 23216 } 23217 if x5.Aux != s { 23218 break 23219 } 23220 _ = x5.Args[1] 23221 if p != x5.Args[0] { 23222 break 23223 } 23224 if mem != x5.Args[1] { 23225 break 23226 } 23227 y6 := o0.Args[1] 23228 if y6.Op != OpARM64MOVDnop { 23229 break 23230 } 23231 x6 := y6.Args[0] 23232 if x6.Op != OpARM64MOVBUload { 23233 break 23234 } 23235 if x6.AuxInt != 1 { 23236 break 23237 } 23238 if x6.Aux != s { 23239 break 23240 } 23241 _ = x6.Args[1] 23242 p1 := x6.Args[0] 23243 if p1.Op != OpARM64ADD { 23244 break 23245 } 23246 _ = p1.Args[1] 23247 ptr1 
:= p1.Args[0] 23248 idx1 := p1.Args[1] 23249 if mem != x6.Args[1] { 23250 break 23251 } 23252 y7 := v.Args[1] 23253 if y7.Op != OpARM64MOVDnop { 23254 break 23255 } 23256 x7 := y7.Args[0] 23257 if x7.Op != OpARM64MOVBUloadidx { 23258 break 23259 } 23260 _ = x7.Args[2] 23261 ptr0 := x7.Args[0] 23262 idx0 := x7.Args[1] 23263 if mem != x7.Args[2] { 23264 break 23265 } 23266 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23267 break 23268 } 23269 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23270 v0 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t) 23271 v.reset(OpCopy) 23272 v.AddArg(v0) 23273 v0.AddArg(ptr0) 23274 v0.AddArg(idx0) 23275 v0.AddArg(mem) 23276 return true 23277 } 23278 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr0 idx0 mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [7] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [6] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [4] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [3] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [2] {s} p 
mem))) y6:(MOVDnop x6:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))) 23279 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23280 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr0 idx0 mem) 23281 for { 23282 t := v.Type 23283 _ = v.Args[1] 23284 y7 := v.Args[0] 23285 if y7.Op != OpARM64MOVDnop { 23286 break 23287 } 23288 x7 := y7.Args[0] 23289 if x7.Op != OpARM64MOVBUloadidx { 23290 break 23291 } 23292 _ = x7.Args[2] 23293 ptr0 := x7.Args[0] 23294 idx0 := x7.Args[1] 23295 mem := x7.Args[2] 23296 o0 := v.Args[1] 23297 if o0.Op != OpARM64ORshiftLL { 23298 break 23299 } 23300 if o0.AuxInt != 8 { 23301 break 23302 } 23303 _ = o0.Args[1] 23304 o1 := o0.Args[0] 23305 if o1.Op != OpARM64ORshiftLL { 23306 break 23307 } 23308 if o1.AuxInt != 16 { 23309 break 23310 } 23311 _ = o1.Args[1] 23312 o2 := o1.Args[0] 23313 if o2.Op != OpARM64ORshiftLL { 23314 break 23315 } 23316 if o2.AuxInt != 24 { 23317 break 23318 } 23319 _ = o2.Args[1] 23320 o3 := o2.Args[0] 23321 if o3.Op != OpARM64ORshiftLL { 23322 break 23323 } 23324 if o3.AuxInt != 32 { 23325 break 23326 } 23327 _ = o3.Args[1] 23328 o4 := o3.Args[0] 23329 if o4.Op != 
OpARM64ORshiftLL { 23330 break 23331 } 23332 if o4.AuxInt != 40 { 23333 break 23334 } 23335 _ = o4.Args[1] 23336 o5 := o4.Args[0] 23337 if o5.Op != OpARM64ORshiftLL { 23338 break 23339 } 23340 if o5.AuxInt != 48 { 23341 break 23342 } 23343 _ = o5.Args[1] 23344 s0 := o5.Args[0] 23345 if s0.Op != OpARM64SLLconst { 23346 break 23347 } 23348 if s0.AuxInt != 56 { 23349 break 23350 } 23351 y0 := s0.Args[0] 23352 if y0.Op != OpARM64MOVDnop { 23353 break 23354 } 23355 x0 := y0.Args[0] 23356 if x0.Op != OpARM64MOVBUload { 23357 break 23358 } 23359 if x0.AuxInt != 7 { 23360 break 23361 } 23362 s := x0.Aux 23363 _ = x0.Args[1] 23364 p := x0.Args[0] 23365 if mem != x0.Args[1] { 23366 break 23367 } 23368 y1 := o5.Args[1] 23369 if y1.Op != OpARM64MOVDnop { 23370 break 23371 } 23372 x1 := y1.Args[0] 23373 if x1.Op != OpARM64MOVBUload { 23374 break 23375 } 23376 if x1.AuxInt != 6 { 23377 break 23378 } 23379 if x1.Aux != s { 23380 break 23381 } 23382 _ = x1.Args[1] 23383 if p != x1.Args[0] { 23384 break 23385 } 23386 if mem != x1.Args[1] { 23387 break 23388 } 23389 y2 := o4.Args[1] 23390 if y2.Op != OpARM64MOVDnop { 23391 break 23392 } 23393 x2 := y2.Args[0] 23394 if x2.Op != OpARM64MOVBUload { 23395 break 23396 } 23397 if x2.AuxInt != 5 { 23398 break 23399 } 23400 if x2.Aux != s { 23401 break 23402 } 23403 _ = x2.Args[1] 23404 if p != x2.Args[0] { 23405 break 23406 } 23407 if mem != x2.Args[1] { 23408 break 23409 } 23410 y3 := o3.Args[1] 23411 if y3.Op != OpARM64MOVDnop { 23412 break 23413 } 23414 x3 := y3.Args[0] 23415 if x3.Op != OpARM64MOVBUload { 23416 break 23417 } 23418 if x3.AuxInt != 4 { 23419 break 23420 } 23421 if x3.Aux != s { 23422 break 23423 } 23424 _ = x3.Args[1] 23425 if p != x3.Args[0] { 23426 break 23427 } 23428 if mem != x3.Args[1] { 23429 break 23430 } 23431 y4 := o2.Args[1] 23432 if y4.Op != OpARM64MOVDnop { 23433 break 23434 } 23435 x4 := y4.Args[0] 23436 if x4.Op != OpARM64MOVBUload { 23437 break 23438 } 23439 if x4.AuxInt != 3 { 23440 break 23441 } 23442 if 
x4.Aux != s { 23443 break 23444 } 23445 _ = x4.Args[1] 23446 if p != x4.Args[0] { 23447 break 23448 } 23449 if mem != x4.Args[1] { 23450 break 23451 } 23452 y5 := o1.Args[1] 23453 if y5.Op != OpARM64MOVDnop { 23454 break 23455 } 23456 x5 := y5.Args[0] 23457 if x5.Op != OpARM64MOVBUload { 23458 break 23459 } 23460 if x5.AuxInt != 2 { 23461 break 23462 } 23463 if x5.Aux != s { 23464 break 23465 } 23466 _ = x5.Args[1] 23467 if p != x5.Args[0] { 23468 break 23469 } 23470 if mem != x5.Args[1] { 23471 break 23472 } 23473 y6 := o0.Args[1] 23474 if y6.Op != OpARM64MOVDnop { 23475 break 23476 } 23477 x6 := y6.Args[0] 23478 if x6.Op != OpARM64MOVBUload { 23479 break 23480 } 23481 if x6.AuxInt != 1 { 23482 break 23483 } 23484 if x6.Aux != s { 23485 break 23486 } 23487 _ = x6.Args[1] 23488 p1 := x6.Args[0] 23489 if p1.Op != OpARM64ADD { 23490 break 23491 } 23492 _ = p1.Args[1] 23493 ptr1 := p1.Args[0] 23494 idx1 := p1.Args[1] 23495 if mem != x6.Args[1] { 23496 break 23497 } 23498 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23499 break 23500 } 23501 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23502 v0 := b.NewValue0(x6.Pos, 
OpARM64MOVDloadidx, t) 23503 v.reset(OpCopy) 23504 v.AddArg(v0) 23505 v0.AddArg(ptr0) 23506 v0.AddArg(idx0) 23507 v0.AddArg(mem) 23508 return true 23509 } 23510 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem))) 23511 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23512 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 23513 for { 23514 t := v.Type 23515 _ = v.Args[1] 23516 o0 := v.Args[0] 23517 if o0.Op != OpARM64ORshiftLL { 23518 break 23519 } 23520 if o0.AuxInt != 8 { 23521 break 23522 } 23523 _ = o0.Args[1] 23524 o1 := o0.Args[0] 23525 if o1.Op != OpARM64ORshiftLL { 23526 break 23527 } 23528 if o1.AuxInt != 16 { 23529 break 23530 } 23531 _ = o1.Args[1] 23532 o2 := o1.Args[0] 23533 
if o2.Op != OpARM64ORshiftLL { 23534 break 23535 } 23536 if o2.AuxInt != 24 { 23537 break 23538 } 23539 _ = o2.Args[1] 23540 o3 := o2.Args[0] 23541 if o3.Op != OpARM64ORshiftLL { 23542 break 23543 } 23544 if o3.AuxInt != 32 { 23545 break 23546 } 23547 _ = o3.Args[1] 23548 o4 := o3.Args[0] 23549 if o4.Op != OpARM64ORshiftLL { 23550 break 23551 } 23552 if o4.AuxInt != 40 { 23553 break 23554 } 23555 _ = o4.Args[1] 23556 o5 := o4.Args[0] 23557 if o5.Op != OpARM64ORshiftLL { 23558 break 23559 } 23560 if o5.AuxInt != 48 { 23561 break 23562 } 23563 _ = o5.Args[1] 23564 s0 := o5.Args[0] 23565 if s0.Op != OpARM64SLLconst { 23566 break 23567 } 23568 if s0.AuxInt != 56 { 23569 break 23570 } 23571 y0 := s0.Args[0] 23572 if y0.Op != OpARM64MOVDnop { 23573 break 23574 } 23575 x0 := y0.Args[0] 23576 if x0.Op != OpARM64MOVBUloadidx { 23577 break 23578 } 23579 _ = x0.Args[2] 23580 ptr := x0.Args[0] 23581 x0_1 := x0.Args[1] 23582 if x0_1.Op != OpARM64ADDconst { 23583 break 23584 } 23585 if x0_1.AuxInt != 7 { 23586 break 23587 } 23588 idx := x0_1.Args[0] 23589 mem := x0.Args[2] 23590 y1 := o5.Args[1] 23591 if y1.Op != OpARM64MOVDnop { 23592 break 23593 } 23594 x1 := y1.Args[0] 23595 if x1.Op != OpARM64MOVBUloadidx { 23596 break 23597 } 23598 _ = x1.Args[2] 23599 if ptr != x1.Args[0] { 23600 break 23601 } 23602 x1_1 := x1.Args[1] 23603 if x1_1.Op != OpARM64ADDconst { 23604 break 23605 } 23606 if x1_1.AuxInt != 6 { 23607 break 23608 } 23609 if idx != x1_1.Args[0] { 23610 break 23611 } 23612 if mem != x1.Args[2] { 23613 break 23614 } 23615 y2 := o4.Args[1] 23616 if y2.Op != OpARM64MOVDnop { 23617 break 23618 } 23619 x2 := y2.Args[0] 23620 if x2.Op != OpARM64MOVBUloadidx { 23621 break 23622 } 23623 _ = x2.Args[2] 23624 if ptr != x2.Args[0] { 23625 break 23626 } 23627 x2_1 := x2.Args[1] 23628 if x2_1.Op != OpARM64ADDconst { 23629 break 23630 } 23631 if x2_1.AuxInt != 5 { 23632 break 23633 } 23634 if idx != x2_1.Args[0] { 23635 break 23636 } 23637 if mem != x2.Args[2] { 23638 break 23639 } 
23640 y3 := o3.Args[1] 23641 if y3.Op != OpARM64MOVDnop { 23642 break 23643 } 23644 x3 := y3.Args[0] 23645 if x3.Op != OpARM64MOVBUloadidx { 23646 break 23647 } 23648 _ = x3.Args[2] 23649 if ptr != x3.Args[0] { 23650 break 23651 } 23652 x3_1 := x3.Args[1] 23653 if x3_1.Op != OpARM64ADDconst { 23654 break 23655 } 23656 if x3_1.AuxInt != 4 { 23657 break 23658 } 23659 if idx != x3_1.Args[0] { 23660 break 23661 } 23662 if mem != x3.Args[2] { 23663 break 23664 } 23665 y4 := o2.Args[1] 23666 if y4.Op != OpARM64MOVDnop { 23667 break 23668 } 23669 x4 := y4.Args[0] 23670 if x4.Op != OpARM64MOVBUloadidx { 23671 break 23672 } 23673 _ = x4.Args[2] 23674 if ptr != x4.Args[0] { 23675 break 23676 } 23677 x4_1 := x4.Args[1] 23678 if x4_1.Op != OpARM64ADDconst { 23679 break 23680 } 23681 if x4_1.AuxInt != 3 { 23682 break 23683 } 23684 if idx != x4_1.Args[0] { 23685 break 23686 } 23687 if mem != x4.Args[2] { 23688 break 23689 } 23690 y5 := o1.Args[1] 23691 if y5.Op != OpARM64MOVDnop { 23692 break 23693 } 23694 x5 := y5.Args[0] 23695 if x5.Op != OpARM64MOVBUloadidx { 23696 break 23697 } 23698 _ = x5.Args[2] 23699 if ptr != x5.Args[0] { 23700 break 23701 } 23702 x5_1 := x5.Args[1] 23703 if x5_1.Op != OpARM64ADDconst { 23704 break 23705 } 23706 if x5_1.AuxInt != 2 { 23707 break 23708 } 23709 if idx != x5_1.Args[0] { 23710 break 23711 } 23712 if mem != x5.Args[2] { 23713 break 23714 } 23715 y6 := o0.Args[1] 23716 if y6.Op != OpARM64MOVDnop { 23717 break 23718 } 23719 x6 := y6.Args[0] 23720 if x6.Op != OpARM64MOVBUloadidx { 23721 break 23722 } 23723 _ = x6.Args[2] 23724 if ptr != x6.Args[0] { 23725 break 23726 } 23727 x6_1 := x6.Args[1] 23728 if x6_1.Op != OpARM64ADDconst { 23729 break 23730 } 23731 if x6_1.AuxInt != 1 { 23732 break 23733 } 23734 if idx != x6_1.Args[0] { 23735 break 23736 } 23737 if mem != x6.Args[2] { 23738 break 23739 } 23740 y7 := v.Args[1] 23741 if y7.Op != OpARM64MOVDnop { 23742 break 23743 } 23744 x7 := y7.Args[0] 23745 if x7.Op != OpARM64MOVBUloadidx { 23746 break 
23747 } 23748 _ = x7.Args[2] 23749 if ptr != x7.Args[0] { 23750 break 23751 } 23752 if idx != x7.Args[1] { 23753 break 23754 } 23755 if mem != x7.Args[2] { 23756 break 23757 } 23758 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 23759 break 23760 } 23761 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 23762 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 23763 v.reset(OpCopy) 23764 v.AddArg(v0) 23765 v0.AddArg(ptr) 23766 v0.AddArg(idx) 23767 v0.AddArg(mem) 23768 return true 23769 } 23770 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr idx mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))) 23771 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 
&& y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 23772 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (MOVDloadidx <t> ptr idx mem) 23773 for { 23774 t := v.Type 23775 _ = v.Args[1] 23776 y7 := v.Args[0] 23777 if y7.Op != OpARM64MOVDnop { 23778 break 23779 } 23780 x7 := y7.Args[0] 23781 if x7.Op != OpARM64MOVBUloadidx { 23782 break 23783 } 23784 _ = x7.Args[2] 23785 ptr := x7.Args[0] 23786 idx := x7.Args[1] 23787 mem := x7.Args[2] 23788 o0 := v.Args[1] 23789 if o0.Op != OpARM64ORshiftLL { 23790 break 23791 } 23792 if o0.AuxInt != 8 { 23793 break 23794 } 23795 _ = o0.Args[1] 23796 o1 := o0.Args[0] 23797 if o1.Op != OpARM64ORshiftLL { 23798 break 23799 } 23800 if o1.AuxInt != 16 { 23801 break 23802 } 23803 _ = o1.Args[1] 23804 o2 := o1.Args[0] 23805 if o2.Op != OpARM64ORshiftLL { 23806 break 23807 } 23808 if o2.AuxInt != 24 { 23809 break 23810 } 23811 _ = o2.Args[1] 23812 o3 := o2.Args[0] 23813 if o3.Op != OpARM64ORshiftLL { 23814 break 23815 } 23816 if o3.AuxInt != 32 { 23817 break 23818 } 23819 _ = o3.Args[1] 23820 o4 := o3.Args[0] 23821 if o4.Op != OpARM64ORshiftLL { 23822 break 23823 } 23824 if o4.AuxInt != 40 { 23825 break 23826 } 23827 _ = o4.Args[1] 23828 o5 := o4.Args[0] 23829 if o5.Op != OpARM64ORshiftLL { 23830 break 23831 } 23832 if o5.AuxInt != 48 { 23833 break 23834 } 23835 _ = o5.Args[1] 23836 s0 := o5.Args[0] 23837 if s0.Op != OpARM64SLLconst { 23838 break 23839 } 23840 if s0.AuxInt != 56 { 23841 break 23842 
} 23843 y0 := s0.Args[0] 23844 if y0.Op != OpARM64MOVDnop { 23845 break 23846 } 23847 x0 := y0.Args[0] 23848 if x0.Op != OpARM64MOVBUloadidx { 23849 break 23850 } 23851 _ = x0.Args[2] 23852 if ptr != x0.Args[0] { 23853 break 23854 } 23855 x0_1 := x0.Args[1] 23856 if x0_1.Op != OpARM64ADDconst { 23857 break 23858 } 23859 if x0_1.AuxInt != 7 { 23860 break 23861 } 23862 if idx != x0_1.Args[0] { 23863 break 23864 } 23865 if mem != x0.Args[2] { 23866 break 23867 } 23868 y1 := o5.Args[1] 23869 if y1.Op != OpARM64MOVDnop { 23870 break 23871 } 23872 x1 := y1.Args[0] 23873 if x1.Op != OpARM64MOVBUloadidx { 23874 break 23875 } 23876 _ = x1.Args[2] 23877 if ptr != x1.Args[0] { 23878 break 23879 } 23880 x1_1 := x1.Args[1] 23881 if x1_1.Op != OpARM64ADDconst { 23882 break 23883 } 23884 if x1_1.AuxInt != 6 { 23885 break 23886 } 23887 if idx != x1_1.Args[0] { 23888 break 23889 } 23890 if mem != x1.Args[2] { 23891 break 23892 } 23893 y2 := o4.Args[1] 23894 if y2.Op != OpARM64MOVDnop { 23895 break 23896 } 23897 x2 := y2.Args[0] 23898 if x2.Op != OpARM64MOVBUloadidx { 23899 break 23900 } 23901 _ = x2.Args[2] 23902 if ptr != x2.Args[0] { 23903 break 23904 } 23905 x2_1 := x2.Args[1] 23906 if x2_1.Op != OpARM64ADDconst { 23907 break 23908 } 23909 if x2_1.AuxInt != 5 { 23910 break 23911 } 23912 if idx != x2_1.Args[0] { 23913 break 23914 } 23915 if mem != x2.Args[2] { 23916 break 23917 } 23918 y3 := o3.Args[1] 23919 if y3.Op != OpARM64MOVDnop { 23920 break 23921 } 23922 x3 := y3.Args[0] 23923 if x3.Op != OpARM64MOVBUloadidx { 23924 break 23925 } 23926 _ = x3.Args[2] 23927 if ptr != x3.Args[0] { 23928 break 23929 } 23930 x3_1 := x3.Args[1] 23931 if x3_1.Op != OpARM64ADDconst { 23932 break 23933 } 23934 if x3_1.AuxInt != 4 { 23935 break 23936 } 23937 if idx != x3_1.Args[0] { 23938 break 23939 } 23940 if mem != x3.Args[2] { 23941 break 23942 } 23943 y4 := o2.Args[1] 23944 if y4.Op != OpARM64MOVDnop { 23945 break 23946 } 23947 x4 := y4.Args[0] 23948 if x4.Op != OpARM64MOVBUloadidx { 23949 
break 23950 } 23951 _ = x4.Args[2] 23952 if ptr != x4.Args[0] { 23953 break 23954 } 23955 x4_1 := x4.Args[1] 23956 if x4_1.Op != OpARM64ADDconst { 23957 break 23958 } 23959 if x4_1.AuxInt != 3 { 23960 break 23961 } 23962 if idx != x4_1.Args[0] { 23963 break 23964 } 23965 if mem != x4.Args[2] { 23966 break 23967 } 23968 y5 := o1.Args[1] 23969 if y5.Op != OpARM64MOVDnop { 23970 break 23971 } 23972 x5 := y5.Args[0] 23973 if x5.Op != OpARM64MOVBUloadidx { 23974 break 23975 } 23976 _ = x5.Args[2] 23977 if ptr != x5.Args[0] { 23978 break 23979 } 23980 x5_1 := x5.Args[1] 23981 if x5_1.Op != OpARM64ADDconst { 23982 break 23983 } 23984 if x5_1.AuxInt != 2 { 23985 break 23986 } 23987 if idx != x5_1.Args[0] { 23988 break 23989 } 23990 if mem != x5.Args[2] { 23991 break 23992 } 23993 y6 := o0.Args[1] 23994 if y6.Op != OpARM64MOVDnop { 23995 break 23996 } 23997 x6 := y6.Args[0] 23998 if x6.Op != OpARM64MOVBUloadidx { 23999 break 24000 } 24001 _ = x6.Args[2] 24002 if ptr != x6.Args[0] { 24003 break 24004 } 24005 x6_1 := x6.Args[1] 24006 if x6_1.Op != OpARM64ADDconst { 24007 break 24008 } 24009 if x6_1.AuxInt != 1 { 24010 break 24011 } 24012 if idx != x6_1.Args[0] { 24013 break 24014 } 24015 if mem != x6.Args[2] { 24016 break 24017 } 24018 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && 
clobber(o5) && clobber(s0)) { 24019 break 24020 } 24021 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 24022 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 24023 v.reset(OpCopy) 24024 v.AddArg(v0) 24025 v0.AddArg(ptr) 24026 v0.AddArg(idx) 24027 v0.AddArg(mem) 24028 return true 24029 } 24030 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) 24031 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24032 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24033 for { 24034 t := v.Type 24035 _ = v.Args[1] 24036 o0 := v.Args[0] 24037 if o0.Op != OpARM64ORshiftLL { 24038 break 24039 } 24040 if o0.AuxInt != 8 { 24041 break 24042 } 24043 _ = o0.Args[1] 24044 o1 := o0.Args[0] 24045 if o1.Op != OpARM64ORshiftLL { 24046 break 24047 } 24048 if o1.AuxInt != 16 { 24049 break 24050 } 24051 _ = o1.Args[1] 24052 s0 := o1.Args[0] 24053 if s0.Op != OpARM64SLLconst { 24054 break 24055 } 24056 if s0.AuxInt != 24 { 24057 break 24058 } 24059 y0 := s0.Args[0] 24060 if y0.Op != OpARM64MOVDnop { 24061 break 24062 } 24063 x0 := y0.Args[0] 24064 if x0.Op != OpARM64MOVBUload { 24065 break 24066 } 24067 i0 := x0.AuxInt 24068 s := x0.Aux 24069 _ = x0.Args[1] 24070 p := x0.Args[0] 24071 mem := x0.Args[1] 24072 y1 := o1.Args[1] 24073 if y1.Op != OpARM64MOVDnop { 24074 break 24075 } 24076 x1 := y1.Args[0] 24077 if x1.Op != OpARM64MOVBUload { 24078 break 24079 } 24080 i1 := x1.AuxInt 
24081 if x1.Aux != s { 24082 break 24083 } 24084 _ = x1.Args[1] 24085 if p != x1.Args[0] { 24086 break 24087 } 24088 if mem != x1.Args[1] { 24089 break 24090 } 24091 y2 := o0.Args[1] 24092 if y2.Op != OpARM64MOVDnop { 24093 break 24094 } 24095 x2 := y2.Args[0] 24096 if x2.Op != OpARM64MOVBUload { 24097 break 24098 } 24099 i2 := x2.AuxInt 24100 if x2.Aux != s { 24101 break 24102 } 24103 _ = x2.Args[1] 24104 if p != x2.Args[0] { 24105 break 24106 } 24107 if mem != x2.Args[1] { 24108 break 24109 } 24110 y3 := v.Args[1] 24111 if y3.Op != OpARM64MOVDnop { 24112 break 24113 } 24114 x3 := y3.Args[0] 24115 if x3.Op != OpARM64MOVBUload { 24116 break 24117 } 24118 i3 := x3.AuxInt 24119 if x3.Aux != s { 24120 break 24121 } 24122 _ = x3.Args[1] 24123 if p != x3.Args[0] { 24124 break 24125 } 24126 if mem != x3.Args[1] { 24127 break 24128 } 24129 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24130 break 24131 } 24132 b = mergePoint(b, x0, x1, x2, x3) 24133 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t) 24134 v.reset(OpCopy) 24135 v.AddArg(v0) 24136 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUload, t) 24137 v1.Aux = s 24138 v2 := b.NewValue0(x3.Pos, OpOffPtr, p.Type) 24139 v2.AuxInt = i0 24140 v2.AddArg(p) 24141 v1.AddArg(v2) 24142 v1.AddArg(mem) 24143 v0.AddArg(v1) 24144 return true 24145 } 24146 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem)))) 24147 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && 
x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24148 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24149 for { 24150 t := v.Type 24151 _ = v.Args[1] 24152 y3 := v.Args[0] 24153 if y3.Op != OpARM64MOVDnop { 24154 break 24155 } 24156 x3 := y3.Args[0] 24157 if x3.Op != OpARM64MOVBUload { 24158 break 24159 } 24160 i3 := x3.AuxInt 24161 s := x3.Aux 24162 _ = x3.Args[1] 24163 p := x3.Args[0] 24164 mem := x3.Args[1] 24165 o0 := v.Args[1] 24166 if o0.Op != OpARM64ORshiftLL { 24167 break 24168 } 24169 if o0.AuxInt != 8 { 24170 break 24171 } 24172 _ = o0.Args[1] 24173 o1 := o0.Args[0] 24174 if o1.Op != OpARM64ORshiftLL { 24175 break 24176 } 24177 if o1.AuxInt != 16 { 24178 break 24179 } 24180 _ = o1.Args[1] 24181 s0 := o1.Args[0] 24182 if s0.Op != OpARM64SLLconst { 24183 break 24184 } 24185 if s0.AuxInt != 24 { 24186 break 24187 } 24188 y0 := s0.Args[0] 24189 if y0.Op != OpARM64MOVDnop { 24190 break 24191 } 24192 x0 := y0.Args[0] 24193 if x0.Op != OpARM64MOVBUload { 24194 break 24195 } 24196 i0 := x0.AuxInt 24197 if x0.Aux != s { 24198 break 24199 } 24200 _ = x0.Args[1] 24201 if p != x0.Args[0] { 24202 break 24203 } 24204 if mem != x0.Args[1] { 24205 break 24206 } 24207 y1 := o1.Args[1] 24208 if y1.Op != OpARM64MOVDnop { 24209 break 24210 } 24211 x1 := y1.Args[0] 24212 if x1.Op != OpARM64MOVBUload { 24213 break 24214 } 24215 i1 := x1.AuxInt 24216 if x1.Aux != s { 24217 break 24218 } 24219 _ = x1.Args[1] 24220 if p != x1.Args[0] { 24221 break 24222 } 24223 if mem != x1.Args[1] { 24224 break 24225 } 24226 y2 := o0.Args[1] 24227 if y2.Op != OpARM64MOVDnop { 24228 break 24229 } 24230 x2 := y2.Args[0] 24231 if x2.Op != 
OpARM64MOVBUload { 24232 break 24233 } 24234 i2 := x2.AuxInt 24235 if x2.Aux != s { 24236 break 24237 } 24238 _ = x2.Args[1] 24239 if p != x2.Args[0] { 24240 break 24241 } 24242 if mem != x2.Args[1] { 24243 break 24244 } 24245 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24246 break 24247 } 24248 b = mergePoint(b, x0, x1, x2, x3) 24249 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t) 24250 v.reset(OpCopy) 24251 v.AddArg(v0) 24252 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 24253 v1.Aux = s 24254 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 24255 v2.AuxInt = i0 24256 v2.AddArg(p) 24257 v1.AddArg(v2) 24258 v1.AddArg(mem) 24259 v0.AddArg(v1) 24260 return true 24261 } 24262 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) 24263 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24264 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 24265 for { 24266 t := v.Type 24267 _ = v.Args[1] 24268 o0 := 
v.Args[0] 24269 if o0.Op != OpARM64ORshiftLL { 24270 break 24271 } 24272 if o0.AuxInt != 8 { 24273 break 24274 } 24275 _ = o0.Args[1] 24276 o1 := o0.Args[0] 24277 if o1.Op != OpARM64ORshiftLL { 24278 break 24279 } 24280 if o1.AuxInt != 16 { 24281 break 24282 } 24283 _ = o1.Args[1] 24284 s0 := o1.Args[0] 24285 if s0.Op != OpARM64SLLconst { 24286 break 24287 } 24288 if s0.AuxInt != 24 { 24289 break 24290 } 24291 y0 := s0.Args[0] 24292 if y0.Op != OpARM64MOVDnop { 24293 break 24294 } 24295 x0 := y0.Args[0] 24296 if x0.Op != OpARM64MOVBUloadidx { 24297 break 24298 } 24299 _ = x0.Args[2] 24300 ptr0 := x0.Args[0] 24301 idx0 := x0.Args[1] 24302 mem := x0.Args[2] 24303 y1 := o1.Args[1] 24304 if y1.Op != OpARM64MOVDnop { 24305 break 24306 } 24307 x1 := y1.Args[0] 24308 if x1.Op != OpARM64MOVBUload { 24309 break 24310 } 24311 if x1.AuxInt != 1 { 24312 break 24313 } 24314 s := x1.Aux 24315 _ = x1.Args[1] 24316 p1 := x1.Args[0] 24317 if p1.Op != OpARM64ADD { 24318 break 24319 } 24320 _ = p1.Args[1] 24321 ptr1 := p1.Args[0] 24322 idx1 := p1.Args[1] 24323 if mem != x1.Args[1] { 24324 break 24325 } 24326 y2 := o0.Args[1] 24327 if y2.Op != OpARM64MOVDnop { 24328 break 24329 } 24330 x2 := y2.Args[0] 24331 if x2.Op != OpARM64MOVBUload { 24332 break 24333 } 24334 if x2.AuxInt != 2 { 24335 break 24336 } 24337 if x2.Aux != s { 24338 break 24339 } 24340 _ = x2.Args[1] 24341 p := x2.Args[0] 24342 if mem != x2.Args[1] { 24343 break 24344 } 24345 y3 := v.Args[1] 24346 if y3.Op != OpARM64MOVDnop { 24347 break 24348 } 24349 x3 := y3.Args[0] 24350 if x3.Op != OpARM64MOVBUload { 24351 break 24352 } 24353 if x3.AuxInt != 3 { 24354 break 24355 } 24356 if x3.Aux != s { 24357 break 24358 } 24359 _ = x3.Args[1] 24360 if p != x3.Args[0] { 24361 break 24362 } 24363 if mem != x3.Args[1] { 24364 break 24365 } 24366 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 
&& mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24367 break 24368 } 24369 b = mergePoint(b, x0, x1, x2, x3) 24370 v0 := b.NewValue0(x3.Pos, OpARM64REVW, t) 24371 v.reset(OpCopy) 24372 v.AddArg(v0) 24373 v1 := b.NewValue0(x3.Pos, OpARM64MOVWUloadidx, t) 24374 v1.AddArg(ptr0) 24375 v1.AddArg(idx0) 24376 v1.AddArg(mem) 24377 v0.AddArg(v1) 24378 return true 24379 } 24380 // match: (OR <t> y3:(MOVDnop x3:(MOVBUload [3] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem)))) 24381 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24382 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 24383 for { 24384 t := v.Type 24385 _ = v.Args[1] 24386 y3 := v.Args[0] 24387 if y3.Op != OpARM64MOVDnop { 24388 break 24389 } 24390 x3 := y3.Args[0] 24391 if x3.Op != OpARM64MOVBUload { 24392 break 24393 } 24394 if x3.AuxInt != 3 { 24395 break 24396 } 24397 s := x3.Aux 24398 _ = x3.Args[1] 24399 p := x3.Args[0] 24400 mem := x3.Args[1] 24401 o0 := v.Args[1] 24402 if o0.Op != OpARM64ORshiftLL { 24403 break 24404 } 24405 if o0.AuxInt != 8 { 24406 break 24407 } 24408 _ 
= o0.Args[1] 24409 o1 := o0.Args[0] 24410 if o1.Op != OpARM64ORshiftLL { 24411 break 24412 } 24413 if o1.AuxInt != 16 { 24414 break 24415 } 24416 _ = o1.Args[1] 24417 s0 := o1.Args[0] 24418 if s0.Op != OpARM64SLLconst { 24419 break 24420 } 24421 if s0.AuxInt != 24 { 24422 break 24423 } 24424 y0 := s0.Args[0] 24425 if y0.Op != OpARM64MOVDnop { 24426 break 24427 } 24428 x0 := y0.Args[0] 24429 if x0.Op != OpARM64MOVBUloadidx { 24430 break 24431 } 24432 _ = x0.Args[2] 24433 ptr0 := x0.Args[0] 24434 idx0 := x0.Args[1] 24435 if mem != x0.Args[2] { 24436 break 24437 } 24438 y1 := o1.Args[1] 24439 if y1.Op != OpARM64MOVDnop { 24440 break 24441 } 24442 x1 := y1.Args[0] 24443 if x1.Op != OpARM64MOVBUload { 24444 break 24445 } 24446 if x1.AuxInt != 1 { 24447 break 24448 } 24449 if x1.Aux != s { 24450 break 24451 } 24452 _ = x1.Args[1] 24453 p1 := x1.Args[0] 24454 if p1.Op != OpARM64ADD { 24455 break 24456 } 24457 _ = p1.Args[1] 24458 ptr1 := p1.Args[0] 24459 idx1 := p1.Args[1] 24460 if mem != x1.Args[1] { 24461 break 24462 } 24463 y2 := o0.Args[1] 24464 if y2.Op != OpARM64MOVDnop { 24465 break 24466 } 24467 x2 := y2.Args[0] 24468 if x2.Op != OpARM64MOVBUload { 24469 break 24470 } 24471 if x2.AuxInt != 2 { 24472 break 24473 } 24474 if x2.Aux != s { 24475 break 24476 } 24477 _ = x2.Args[1] 24478 if p != x2.Args[0] { 24479 break 24480 } 24481 if mem != x2.Args[1] { 24482 break 24483 } 24484 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24485 break 24486 } 24487 b = mergePoint(b, x0, x1, x2, x3) 24488 v0 := 
b.NewValue0(x2.Pos, OpARM64REVW, t) 24489 v.reset(OpCopy) 24490 v.AddArg(v0) 24491 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 24492 v1.AddArg(ptr0) 24493 v1.AddArg(idx0) 24494 v1.AddArg(mem) 24495 v0.AddArg(v1) 24496 return true 24497 } 24498 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 24499 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24500 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 24501 for { 24502 t := v.Type 24503 _ = v.Args[1] 24504 o0 := v.Args[0] 24505 if o0.Op != OpARM64ORshiftLL { 24506 break 24507 } 24508 if o0.AuxInt != 8 { 24509 break 24510 } 24511 _ = o0.Args[1] 24512 o1 := o0.Args[0] 24513 if o1.Op != OpARM64ORshiftLL { 24514 break 24515 } 24516 if o1.AuxInt != 16 { 24517 break 24518 } 24519 _ = o1.Args[1] 24520 s0 := o1.Args[0] 24521 if s0.Op != OpARM64SLLconst { 24522 break 24523 } 24524 if s0.AuxInt != 24 { 24525 break 24526 } 24527 y0 := s0.Args[0] 24528 if y0.Op != OpARM64MOVDnop { 24529 break 24530 } 24531 x0 := y0.Args[0] 24532 if x0.Op != OpARM64MOVBUloadidx { 24533 break 24534 } 24535 _ = x0.Args[2] 24536 ptr := x0.Args[0] 24537 idx := x0.Args[1] 24538 mem := x0.Args[2] 24539 y1 := o1.Args[1] 24540 if y1.Op != OpARM64MOVDnop { 24541 break 24542 } 24543 x1 := y1.Args[0] 24544 if x1.Op != OpARM64MOVBUloadidx { 24545 break 24546 } 24547 _ = x1.Args[2] 24548 if ptr != x1.Args[0] { 24549 break 24550 } 24551 x1_1 := x1.Args[1] 
24552 if x1_1.Op != OpARM64ADDconst { 24553 break 24554 } 24555 if x1_1.AuxInt != 1 { 24556 break 24557 } 24558 if idx != x1_1.Args[0] { 24559 break 24560 } 24561 if mem != x1.Args[2] { 24562 break 24563 } 24564 y2 := o0.Args[1] 24565 if y2.Op != OpARM64MOVDnop { 24566 break 24567 } 24568 x2 := y2.Args[0] 24569 if x2.Op != OpARM64MOVBUloadidx { 24570 break 24571 } 24572 _ = x2.Args[2] 24573 if ptr != x2.Args[0] { 24574 break 24575 } 24576 x2_1 := x2.Args[1] 24577 if x2_1.Op != OpARM64ADDconst { 24578 break 24579 } 24580 if x2_1.AuxInt != 2 { 24581 break 24582 } 24583 if idx != x2_1.Args[0] { 24584 break 24585 } 24586 if mem != x2.Args[2] { 24587 break 24588 } 24589 y3 := v.Args[1] 24590 if y3.Op != OpARM64MOVDnop { 24591 break 24592 } 24593 x3 := y3.Args[0] 24594 if x3.Op != OpARM64MOVBUloadidx { 24595 break 24596 } 24597 _ = x3.Args[2] 24598 if ptr != x3.Args[0] { 24599 break 24600 } 24601 x3_1 := x3.Args[1] 24602 if x3_1.Op != OpARM64ADDconst { 24603 break 24604 } 24605 if x3_1.AuxInt != 3 { 24606 break 24607 } 24608 if idx != x3_1.Args[0] { 24609 break 24610 } 24611 if mem != x3.Args[2] { 24612 break 24613 } 24614 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24615 break 24616 } 24617 b = mergePoint(b, x0, x1, x2, x3) 24618 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24619 v.reset(OpCopy) 24620 v.AddArg(v0) 24621 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24622 v1.AddArg(ptr) 24623 v1.AddArg(idx) 24624 v1.AddArg(mem) 24625 v0.AddArg(v1) 24626 return true 24627 } 24628 return false 24629 } 24630 func rewriteValueARM64_OpARM64OR_40(v *Value) bool { 24631 b := v.Block 24632 _ = b 24633 // match: (OR <t> y3:(MOVDnop 
x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] s0:(SLLconst [24] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem)))) 24634 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0) 24635 // result: @mergePoint(b,x0,x1,x2,x3) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 24636 for { 24637 t := v.Type 24638 _ = v.Args[1] 24639 y3 := v.Args[0] 24640 if y3.Op != OpARM64MOVDnop { 24641 break 24642 } 24643 x3 := y3.Args[0] 24644 if x3.Op != OpARM64MOVBUloadidx { 24645 break 24646 } 24647 _ = x3.Args[2] 24648 ptr := x3.Args[0] 24649 x3_1 := x3.Args[1] 24650 if x3_1.Op != OpARM64ADDconst { 24651 break 24652 } 24653 if x3_1.AuxInt != 3 { 24654 break 24655 } 24656 idx := x3_1.Args[0] 24657 mem := x3.Args[2] 24658 o0 := v.Args[1] 24659 if o0.Op != OpARM64ORshiftLL { 24660 break 24661 } 24662 if o0.AuxInt != 8 { 24663 break 24664 } 24665 _ = o0.Args[1] 24666 o1 := o0.Args[0] 24667 if o1.Op != OpARM64ORshiftLL { 24668 break 24669 } 24670 if o1.AuxInt != 16 { 24671 break 24672 } 24673 _ = o1.Args[1] 24674 s0 := o1.Args[0] 24675 if s0.Op != OpARM64SLLconst { 24676 break 24677 } 24678 if s0.AuxInt != 24 { 24679 break 24680 } 24681 y0 := s0.Args[0] 24682 if y0.Op != OpARM64MOVDnop { 24683 break 24684 } 24685 x0 := y0.Args[0] 24686 if x0.Op != OpARM64MOVBUloadidx { 24687 break 24688 } 24689 _ = x0.Args[2] 24690 if ptr != x0.Args[0] { 24691 break 24692 } 24693 if idx != x0.Args[1] { 24694 break 24695 } 24696 if mem != x0.Args[2] { 24697 break 24698 } 24699 y1 := o1.Args[1] 24700 if y1.Op != OpARM64MOVDnop { 24701 break 24702 
} 24703 x1 := y1.Args[0] 24704 if x1.Op != OpARM64MOVBUloadidx { 24705 break 24706 } 24707 _ = x1.Args[2] 24708 if ptr != x1.Args[0] { 24709 break 24710 } 24711 x1_1 := x1.Args[1] 24712 if x1_1.Op != OpARM64ADDconst { 24713 break 24714 } 24715 if x1_1.AuxInt != 1 { 24716 break 24717 } 24718 if idx != x1_1.Args[0] { 24719 break 24720 } 24721 if mem != x1.Args[2] { 24722 break 24723 } 24724 y2 := o0.Args[1] 24725 if y2.Op != OpARM64MOVDnop { 24726 break 24727 } 24728 x2 := y2.Args[0] 24729 if x2.Op != OpARM64MOVBUloadidx { 24730 break 24731 } 24732 _ = x2.Args[2] 24733 if ptr != x2.Args[0] { 24734 break 24735 } 24736 x2_1 := x2.Args[1] 24737 if x2_1.Op != OpARM64ADDconst { 24738 break 24739 } 24740 if x2_1.AuxInt != 2 { 24741 break 24742 } 24743 if idx != x2_1.Args[0] { 24744 break 24745 } 24746 if mem != x2.Args[2] { 24747 break 24748 } 24749 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(o0) && clobber(o1) && clobber(s0)) { 24750 break 24751 } 24752 b = mergePoint(b, x0, x1, x2, x3) 24753 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 24754 v.reset(OpCopy) 24755 v.AddArg(v0) 24756 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 24757 v1.AddArg(ptr) 24758 v1.AddArg(idx) 24759 v1.AddArg(mem) 24760 v0.AddArg(v1) 24761 return true 24762 } 24763 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload 
[i6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem))) 24764 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 24765 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24766 for { 24767 t := v.Type 24768 _ = v.Args[1] 24769 o0 := v.Args[0] 24770 if o0.Op != OpARM64ORshiftLL { 24771 break 24772 } 24773 if o0.AuxInt != 8 { 24774 break 24775 } 24776 _ = o0.Args[1] 24777 o1 := o0.Args[0] 24778 if o1.Op != OpARM64ORshiftLL { 24779 break 24780 } 24781 if o1.AuxInt != 16 { 24782 break 24783 } 24784 _ = o1.Args[1] 24785 o2 := o1.Args[0] 24786 if o2.Op != OpARM64ORshiftLL { 24787 break 24788 } 24789 if o2.AuxInt != 24 { 24790 break 24791 } 24792 _ = o2.Args[1] 24793 o3 := o2.Args[0] 24794 if o3.Op != OpARM64ORshiftLL { 24795 break 24796 } 24797 if o3.AuxInt != 32 { 24798 break 24799 } 24800 _ = o3.Args[1] 24801 o4 := o3.Args[0] 24802 if o4.Op != OpARM64ORshiftLL { 24803 break 24804 } 24805 if o4.AuxInt != 40 { 24806 break 24807 } 24808 _ = o4.Args[1] 24809 o5 := o4.Args[0] 24810 if o5.Op != OpARM64ORshiftLL { 24811 break 24812 } 24813 if o5.AuxInt != 48 { 24814 break 24815 } 24816 _ = o5.Args[1] 24817 s0 := 
o5.Args[0] 24818 if s0.Op != OpARM64SLLconst { 24819 break 24820 } 24821 if s0.AuxInt != 56 { 24822 break 24823 } 24824 y0 := s0.Args[0] 24825 if y0.Op != OpARM64MOVDnop { 24826 break 24827 } 24828 x0 := y0.Args[0] 24829 if x0.Op != OpARM64MOVBUload { 24830 break 24831 } 24832 i0 := x0.AuxInt 24833 s := x0.Aux 24834 _ = x0.Args[1] 24835 p := x0.Args[0] 24836 mem := x0.Args[1] 24837 y1 := o5.Args[1] 24838 if y1.Op != OpARM64MOVDnop { 24839 break 24840 } 24841 x1 := y1.Args[0] 24842 if x1.Op != OpARM64MOVBUload { 24843 break 24844 } 24845 i1 := x1.AuxInt 24846 if x1.Aux != s { 24847 break 24848 } 24849 _ = x1.Args[1] 24850 if p != x1.Args[0] { 24851 break 24852 } 24853 if mem != x1.Args[1] { 24854 break 24855 } 24856 y2 := o4.Args[1] 24857 if y2.Op != OpARM64MOVDnop { 24858 break 24859 } 24860 x2 := y2.Args[0] 24861 if x2.Op != OpARM64MOVBUload { 24862 break 24863 } 24864 i2 := x2.AuxInt 24865 if x2.Aux != s { 24866 break 24867 } 24868 _ = x2.Args[1] 24869 if p != x2.Args[0] { 24870 break 24871 } 24872 if mem != x2.Args[1] { 24873 break 24874 } 24875 y3 := o3.Args[1] 24876 if y3.Op != OpARM64MOVDnop { 24877 break 24878 } 24879 x3 := y3.Args[0] 24880 if x3.Op != OpARM64MOVBUload { 24881 break 24882 } 24883 i3 := x3.AuxInt 24884 if x3.Aux != s { 24885 break 24886 } 24887 _ = x3.Args[1] 24888 if p != x3.Args[0] { 24889 break 24890 } 24891 if mem != x3.Args[1] { 24892 break 24893 } 24894 y4 := o2.Args[1] 24895 if y4.Op != OpARM64MOVDnop { 24896 break 24897 } 24898 x4 := y4.Args[0] 24899 if x4.Op != OpARM64MOVBUload { 24900 break 24901 } 24902 i4 := x4.AuxInt 24903 if x4.Aux != s { 24904 break 24905 } 24906 _ = x4.Args[1] 24907 if p != x4.Args[0] { 24908 break 24909 } 24910 if mem != x4.Args[1] { 24911 break 24912 } 24913 y5 := o1.Args[1] 24914 if y5.Op != OpARM64MOVDnop { 24915 break 24916 } 24917 x5 := y5.Args[0] 24918 if x5.Op != OpARM64MOVBUload { 24919 break 24920 } 24921 i5 := x5.AuxInt 24922 if x5.Aux != s { 24923 break 24924 } 24925 _ = x5.Args[1] 24926 if p != 
x5.Args[0] { 24927 break 24928 } 24929 if mem != x5.Args[1] { 24930 break 24931 } 24932 y6 := o0.Args[1] 24933 if y6.Op != OpARM64MOVDnop { 24934 break 24935 } 24936 x6 := y6.Args[0] 24937 if x6.Op != OpARM64MOVBUload { 24938 break 24939 } 24940 i6 := x6.AuxInt 24941 if x6.Aux != s { 24942 break 24943 } 24944 _ = x6.Args[1] 24945 if p != x6.Args[0] { 24946 break 24947 } 24948 if mem != x6.Args[1] { 24949 break 24950 } 24951 y7 := v.Args[1] 24952 if y7.Op != OpARM64MOVDnop { 24953 break 24954 } 24955 x7 := y7.Args[0] 24956 if x7.Op != OpARM64MOVBUload { 24957 break 24958 } 24959 i7 := x7.AuxInt 24960 if x7.Aux != s { 24961 break 24962 } 24963 _ = x7.Args[1] 24964 if p != x7.Args[0] { 24965 break 24966 } 24967 if mem != x7.Args[1] { 24968 break 24969 } 24970 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 24971 break 24972 } 24973 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 24974 v0 := b.NewValue0(x7.Pos, OpARM64REV, t) 24975 v.reset(OpCopy) 24976 v.AddArg(v0) 24977 v1 := b.NewValue0(x7.Pos, OpARM64MOVDload, t) 24978 v1.Aux = s 24979 v2 := b.NewValue0(x7.Pos, OpOffPtr, p.Type) 24980 v2.AuxInt = i0 24981 v2.AddArg(p) 24982 v1.AddArg(v2) 24983 
v1.AddArg(mem) 24984 v0.AddArg(v1) 24985 return true 24986 } 24987 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [i7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem))) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [i5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [i6] {s} p mem)))) 24988 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 24989 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 24990 for { 24991 t := v.Type 24992 _ = v.Args[1] 24993 y7 := v.Args[0] 24994 if y7.Op != OpARM64MOVDnop { 24995 break 24996 } 24997 x7 := y7.Args[0] 24998 if x7.Op != OpARM64MOVBUload { 24999 break 25000 } 25001 i7 := x7.AuxInt 25002 s := x7.Aux 25003 _ = x7.Args[1] 25004 p := x7.Args[0] 25005 mem := x7.Args[1] 25006 o0 := v.Args[1] 25007 if o0.Op != OpARM64ORshiftLL { 25008 break 25009 } 25010 if o0.AuxInt != 8 { 25011 break 25012 } 25013 _ 
= o0.Args[1] 25014 o1 := o0.Args[0] 25015 if o1.Op != OpARM64ORshiftLL { 25016 break 25017 } 25018 if o1.AuxInt != 16 { 25019 break 25020 } 25021 _ = o1.Args[1] 25022 o2 := o1.Args[0] 25023 if o2.Op != OpARM64ORshiftLL { 25024 break 25025 } 25026 if o2.AuxInt != 24 { 25027 break 25028 } 25029 _ = o2.Args[1] 25030 o3 := o2.Args[0] 25031 if o3.Op != OpARM64ORshiftLL { 25032 break 25033 } 25034 if o3.AuxInt != 32 { 25035 break 25036 } 25037 _ = o3.Args[1] 25038 o4 := o3.Args[0] 25039 if o4.Op != OpARM64ORshiftLL { 25040 break 25041 } 25042 if o4.AuxInt != 40 { 25043 break 25044 } 25045 _ = o4.Args[1] 25046 o5 := o4.Args[0] 25047 if o5.Op != OpARM64ORshiftLL { 25048 break 25049 } 25050 if o5.AuxInt != 48 { 25051 break 25052 } 25053 _ = o5.Args[1] 25054 s0 := o5.Args[0] 25055 if s0.Op != OpARM64SLLconst { 25056 break 25057 } 25058 if s0.AuxInt != 56 { 25059 break 25060 } 25061 y0 := s0.Args[0] 25062 if y0.Op != OpARM64MOVDnop { 25063 break 25064 } 25065 x0 := y0.Args[0] 25066 if x0.Op != OpARM64MOVBUload { 25067 break 25068 } 25069 i0 := x0.AuxInt 25070 if x0.Aux != s { 25071 break 25072 } 25073 _ = x0.Args[1] 25074 if p != x0.Args[0] { 25075 break 25076 } 25077 if mem != x0.Args[1] { 25078 break 25079 } 25080 y1 := o5.Args[1] 25081 if y1.Op != OpARM64MOVDnop { 25082 break 25083 } 25084 x1 := y1.Args[0] 25085 if x1.Op != OpARM64MOVBUload { 25086 break 25087 } 25088 i1 := x1.AuxInt 25089 if x1.Aux != s { 25090 break 25091 } 25092 _ = x1.Args[1] 25093 if p != x1.Args[0] { 25094 break 25095 } 25096 if mem != x1.Args[1] { 25097 break 25098 } 25099 y2 := o4.Args[1] 25100 if y2.Op != OpARM64MOVDnop { 25101 break 25102 } 25103 x2 := y2.Args[0] 25104 if x2.Op != OpARM64MOVBUload { 25105 break 25106 } 25107 i2 := x2.AuxInt 25108 if x2.Aux != s { 25109 break 25110 } 25111 _ = x2.Args[1] 25112 if p != x2.Args[0] { 25113 break 25114 } 25115 if mem != x2.Args[1] { 25116 break 25117 } 25118 y3 := o3.Args[1] 25119 if y3.Op != OpARM64MOVDnop { 25120 break 25121 } 25122 x3 := y3.Args[0] 
25123 if x3.Op != OpARM64MOVBUload { 25124 break 25125 } 25126 i3 := x3.AuxInt 25127 if x3.Aux != s { 25128 break 25129 } 25130 _ = x3.Args[1] 25131 if p != x3.Args[0] { 25132 break 25133 } 25134 if mem != x3.Args[1] { 25135 break 25136 } 25137 y4 := o2.Args[1] 25138 if y4.Op != OpARM64MOVDnop { 25139 break 25140 } 25141 x4 := y4.Args[0] 25142 if x4.Op != OpARM64MOVBUload { 25143 break 25144 } 25145 i4 := x4.AuxInt 25146 if x4.Aux != s { 25147 break 25148 } 25149 _ = x4.Args[1] 25150 if p != x4.Args[0] { 25151 break 25152 } 25153 if mem != x4.Args[1] { 25154 break 25155 } 25156 y5 := o1.Args[1] 25157 if y5.Op != OpARM64MOVDnop { 25158 break 25159 } 25160 x5 := y5.Args[0] 25161 if x5.Op != OpARM64MOVBUload { 25162 break 25163 } 25164 i5 := x5.AuxInt 25165 if x5.Aux != s { 25166 break 25167 } 25168 _ = x5.Args[1] 25169 if p != x5.Args[0] { 25170 break 25171 } 25172 if mem != x5.Args[1] { 25173 break 25174 } 25175 y6 := o0.Args[1] 25176 if y6.Op != OpARM64MOVDnop { 25177 break 25178 } 25179 x6 := y6.Args[0] 25180 if x6.Op != OpARM64MOVBUload { 25181 break 25182 } 25183 i6 := x6.AuxInt 25184 if x6.Aux != s { 25185 break 25186 } 25187 _ = x6.Args[1] 25188 if p != x6.Args[0] { 25189 break 25190 } 25191 if mem != x6.Args[1] { 25192 break 25193 } 25194 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) 
&& clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25195 break 25196 } 25197 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25198 v0 := b.NewValue0(x6.Pos, OpARM64REV, t) 25199 v.reset(OpCopy) 25200 v.AddArg(v0) 25201 v1 := b.NewValue0(x6.Pos, OpARM64MOVDload, t) 25202 v1.Aux = s 25203 v2 := b.NewValue0(x6.Pos, OpOffPtr, p.Type) 25204 v2.AuxInt = i0 25205 v2.AddArg(p) 25206 v1.AddArg(v2) 25207 v1.AddArg(mem) 25208 v0.AddArg(v1) 25209 return true 25210 } 25211 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem))) y7:(MOVDnop x7:(MOVBUload [7] {s} p mem))) 25212 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25213 // result: 
@mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 25214 for { 25215 t := v.Type 25216 _ = v.Args[1] 25217 o0 := v.Args[0] 25218 if o0.Op != OpARM64ORshiftLL { 25219 break 25220 } 25221 if o0.AuxInt != 8 { 25222 break 25223 } 25224 _ = o0.Args[1] 25225 o1 := o0.Args[0] 25226 if o1.Op != OpARM64ORshiftLL { 25227 break 25228 } 25229 if o1.AuxInt != 16 { 25230 break 25231 } 25232 _ = o1.Args[1] 25233 o2 := o1.Args[0] 25234 if o2.Op != OpARM64ORshiftLL { 25235 break 25236 } 25237 if o2.AuxInt != 24 { 25238 break 25239 } 25240 _ = o2.Args[1] 25241 o3 := o2.Args[0] 25242 if o3.Op != OpARM64ORshiftLL { 25243 break 25244 } 25245 if o3.AuxInt != 32 { 25246 break 25247 } 25248 _ = o3.Args[1] 25249 o4 := o3.Args[0] 25250 if o4.Op != OpARM64ORshiftLL { 25251 break 25252 } 25253 if o4.AuxInt != 40 { 25254 break 25255 } 25256 _ = o4.Args[1] 25257 o5 := o4.Args[0] 25258 if o5.Op != OpARM64ORshiftLL { 25259 break 25260 } 25261 if o5.AuxInt != 48 { 25262 break 25263 } 25264 _ = o5.Args[1] 25265 s0 := o5.Args[0] 25266 if s0.Op != OpARM64SLLconst { 25267 break 25268 } 25269 if s0.AuxInt != 56 { 25270 break 25271 } 25272 y0 := s0.Args[0] 25273 if y0.Op != OpARM64MOVDnop { 25274 break 25275 } 25276 x0 := y0.Args[0] 25277 if x0.Op != OpARM64MOVBUloadidx { 25278 break 25279 } 25280 _ = x0.Args[2] 25281 ptr0 := x0.Args[0] 25282 idx0 := x0.Args[1] 25283 mem := x0.Args[2] 25284 y1 := o5.Args[1] 25285 if y1.Op != OpARM64MOVDnop { 25286 break 25287 } 25288 x1 := y1.Args[0] 25289 if x1.Op != OpARM64MOVBUload { 25290 break 25291 } 25292 if x1.AuxInt != 1 { 25293 break 25294 } 25295 s := x1.Aux 25296 _ = x1.Args[1] 25297 p1 := x1.Args[0] 25298 if p1.Op != OpARM64ADD { 25299 break 25300 } 25301 _ = p1.Args[1] 25302 ptr1 := p1.Args[0] 25303 idx1 := p1.Args[1] 25304 if mem != x1.Args[1] { 25305 break 25306 } 25307 y2 := o4.Args[1] 25308 if y2.Op != OpARM64MOVDnop { 25309 break 25310 } 25311 x2 := y2.Args[0] 25312 if x2.Op != OpARM64MOVBUload { 25313 break 25314 } 25315 
if x2.AuxInt != 2 { 25316 break 25317 } 25318 if x2.Aux != s { 25319 break 25320 } 25321 _ = x2.Args[1] 25322 p := x2.Args[0] 25323 if mem != x2.Args[1] { 25324 break 25325 } 25326 y3 := o3.Args[1] 25327 if y3.Op != OpARM64MOVDnop { 25328 break 25329 } 25330 x3 := y3.Args[0] 25331 if x3.Op != OpARM64MOVBUload { 25332 break 25333 } 25334 if x3.AuxInt != 3 { 25335 break 25336 } 25337 if x3.Aux != s { 25338 break 25339 } 25340 _ = x3.Args[1] 25341 if p != x3.Args[0] { 25342 break 25343 } 25344 if mem != x3.Args[1] { 25345 break 25346 } 25347 y4 := o2.Args[1] 25348 if y4.Op != OpARM64MOVDnop { 25349 break 25350 } 25351 x4 := y4.Args[0] 25352 if x4.Op != OpARM64MOVBUload { 25353 break 25354 } 25355 if x4.AuxInt != 4 { 25356 break 25357 } 25358 if x4.Aux != s { 25359 break 25360 } 25361 _ = x4.Args[1] 25362 if p != x4.Args[0] { 25363 break 25364 } 25365 if mem != x4.Args[1] { 25366 break 25367 } 25368 y5 := o1.Args[1] 25369 if y5.Op != OpARM64MOVDnop { 25370 break 25371 } 25372 x5 := y5.Args[0] 25373 if x5.Op != OpARM64MOVBUload { 25374 break 25375 } 25376 if x5.AuxInt != 5 { 25377 break 25378 } 25379 if x5.Aux != s { 25380 break 25381 } 25382 _ = x5.Args[1] 25383 if p != x5.Args[0] { 25384 break 25385 } 25386 if mem != x5.Args[1] { 25387 break 25388 } 25389 y6 := o0.Args[1] 25390 if y6.Op != OpARM64MOVDnop { 25391 break 25392 } 25393 x6 := y6.Args[0] 25394 if x6.Op != OpARM64MOVBUload { 25395 break 25396 } 25397 if x6.AuxInt != 6 { 25398 break 25399 } 25400 if x6.Aux != s { 25401 break 25402 } 25403 _ = x6.Args[1] 25404 if p != x6.Args[0] { 25405 break 25406 } 25407 if mem != x6.Args[1] { 25408 break 25409 } 25410 y7 := v.Args[1] 25411 if y7.Op != OpARM64MOVDnop { 25412 break 25413 } 25414 x7 := y7.Args[0] 25415 if x7.Op != OpARM64MOVBUload { 25416 break 25417 } 25418 if x7.AuxInt != 7 { 25419 break 25420 } 25421 if x7.Aux != s { 25422 break 25423 } 25424 _ = x7.Args[1] 25425 if p != x7.Args[0] { 25426 break 25427 } 25428 if mem != x7.Args[1] { 25429 break 25430 } 25431 
if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25432 break 25433 } 25434 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25435 v0 := b.NewValue0(x7.Pos, OpARM64REV, t) 25436 v.reset(OpCopy) 25437 v.AddArg(v0) 25438 v1 := b.NewValue0(x7.Pos, OpARM64MOVDloadidx, t) 25439 v1.AddArg(ptr0) 25440 v1.AddArg(idx0) 25441 v1.AddArg(mem) 25442 v0.AddArg(v1) 25443 return true 25444 } 25445 // match: (OR <t> y7:(MOVDnop x7:(MOVBUload [7] {s} p mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem))) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [3] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [4] {s} p mem))) y5:(MOVDnop x5:(MOVBUload [5] {s} p mem))) y6:(MOVDnop x6:(MOVBUload [6] {s} p mem)))) 25446 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses 
== 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25447 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 25448 for { 25449 t := v.Type 25450 _ = v.Args[1] 25451 y7 := v.Args[0] 25452 if y7.Op != OpARM64MOVDnop { 25453 break 25454 } 25455 x7 := y7.Args[0] 25456 if x7.Op != OpARM64MOVBUload { 25457 break 25458 } 25459 if x7.AuxInt != 7 { 25460 break 25461 } 25462 s := x7.Aux 25463 _ = x7.Args[1] 25464 p := x7.Args[0] 25465 mem := x7.Args[1] 25466 o0 := v.Args[1] 25467 if o0.Op != OpARM64ORshiftLL { 25468 break 25469 } 25470 if o0.AuxInt != 8 { 25471 break 25472 } 25473 _ = o0.Args[1] 25474 o1 := o0.Args[0] 25475 if o1.Op != OpARM64ORshiftLL { 25476 break 25477 } 25478 if o1.AuxInt != 16 { 25479 break 25480 } 25481 _ = o1.Args[1] 25482 o2 := o1.Args[0] 25483 if o2.Op != OpARM64ORshiftLL { 25484 break 25485 } 25486 if o2.AuxInt != 24 { 25487 break 25488 } 25489 _ = o2.Args[1] 25490 o3 := o2.Args[0] 25491 if o3.Op != OpARM64ORshiftLL { 25492 break 25493 } 25494 if o3.AuxInt != 32 { 25495 break 25496 } 25497 _ = o3.Args[1] 25498 o4 := o3.Args[0] 25499 if o4.Op != OpARM64ORshiftLL { 25500 break 25501 } 25502 if o4.AuxInt != 40 { 25503 break 25504 } 25505 _ = o4.Args[1] 25506 o5 := o4.Args[0] 25507 if o5.Op != OpARM64ORshiftLL { 25508 break 25509 } 25510 if o5.AuxInt != 48 { 25511 break 25512 } 25513 _ = o5.Args[1] 25514 s0 := 
o5.Args[0] 25515 if s0.Op != OpARM64SLLconst { 25516 break 25517 } 25518 if s0.AuxInt != 56 { 25519 break 25520 } 25521 y0 := s0.Args[0] 25522 if y0.Op != OpARM64MOVDnop { 25523 break 25524 } 25525 x0 := y0.Args[0] 25526 if x0.Op != OpARM64MOVBUloadidx { 25527 break 25528 } 25529 _ = x0.Args[2] 25530 ptr0 := x0.Args[0] 25531 idx0 := x0.Args[1] 25532 if mem != x0.Args[2] { 25533 break 25534 } 25535 y1 := o5.Args[1] 25536 if y1.Op != OpARM64MOVDnop { 25537 break 25538 } 25539 x1 := y1.Args[0] 25540 if x1.Op != OpARM64MOVBUload { 25541 break 25542 } 25543 if x1.AuxInt != 1 { 25544 break 25545 } 25546 if x1.Aux != s { 25547 break 25548 } 25549 _ = x1.Args[1] 25550 p1 := x1.Args[0] 25551 if p1.Op != OpARM64ADD { 25552 break 25553 } 25554 _ = p1.Args[1] 25555 ptr1 := p1.Args[0] 25556 idx1 := p1.Args[1] 25557 if mem != x1.Args[1] { 25558 break 25559 } 25560 y2 := o4.Args[1] 25561 if y2.Op != OpARM64MOVDnop { 25562 break 25563 } 25564 x2 := y2.Args[0] 25565 if x2.Op != OpARM64MOVBUload { 25566 break 25567 } 25568 if x2.AuxInt != 2 { 25569 break 25570 } 25571 if x2.Aux != s { 25572 break 25573 } 25574 _ = x2.Args[1] 25575 if p != x2.Args[0] { 25576 break 25577 } 25578 if mem != x2.Args[1] { 25579 break 25580 } 25581 y3 := o3.Args[1] 25582 if y3.Op != OpARM64MOVDnop { 25583 break 25584 } 25585 x3 := y3.Args[0] 25586 if x3.Op != OpARM64MOVBUload { 25587 break 25588 } 25589 if x3.AuxInt != 3 { 25590 break 25591 } 25592 if x3.Aux != s { 25593 break 25594 } 25595 _ = x3.Args[1] 25596 if p != x3.Args[0] { 25597 break 25598 } 25599 if mem != x3.Args[1] { 25600 break 25601 } 25602 y4 := o2.Args[1] 25603 if y4.Op != OpARM64MOVDnop { 25604 break 25605 } 25606 x4 := y4.Args[0] 25607 if x4.Op != OpARM64MOVBUload { 25608 break 25609 } 25610 if x4.AuxInt != 4 { 25611 break 25612 } 25613 if x4.Aux != s { 25614 break 25615 } 25616 _ = x4.Args[1] 25617 if p != x4.Args[0] { 25618 break 25619 } 25620 if mem != x4.Args[1] { 25621 break 25622 } 25623 y5 := o1.Args[1] 25624 if y5.Op != 
OpARM64MOVDnop { 25625 break 25626 } 25627 x5 := y5.Args[0] 25628 if x5.Op != OpARM64MOVBUload { 25629 break 25630 } 25631 if x5.AuxInt != 5 { 25632 break 25633 } 25634 if x5.Aux != s { 25635 break 25636 } 25637 _ = x5.Args[1] 25638 if p != x5.Args[0] { 25639 break 25640 } 25641 if mem != x5.Args[1] { 25642 break 25643 } 25644 y6 := o0.Args[1] 25645 if y6.Op != OpARM64MOVDnop { 25646 break 25647 } 25648 x6 := y6.Args[0] 25649 if x6.Op != OpARM64MOVBUload { 25650 break 25651 } 25652 if x6.AuxInt != 6 { 25653 break 25654 } 25655 if x6.Aux != s { 25656 break 25657 } 25658 _ = x6.Args[1] 25659 if p != x6.Args[0] { 25660 break 25661 } 25662 if mem != x6.Args[1] { 25663 break 25664 } 25665 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25666 break 25667 } 25668 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25669 v0 := b.NewValue0(x6.Pos, OpARM64REV, t) 25670 v.reset(OpCopy) 25671 v.AddArg(v0) 25672 v1 := b.NewValue0(x6.Pos, OpARM64MOVDloadidx, t) 25673 v1.AddArg(ptr0) 25674 v1.AddArg(idx0) 25675 v1.AddArg(mem) 25676 v0.AddArg(v1) 25677 return true 25678 } 25679 // match: (OR <t> o0:(ORshiftLL [8] o1:(ORshiftLL [16] 
o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 25680 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25681 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 25682 for { 25683 t := v.Type 25684 _ = v.Args[1] 25685 o0 := v.Args[0] 25686 if o0.Op != OpARM64ORshiftLL { 25687 break 25688 } 25689 if o0.AuxInt != 8 { 25690 break 25691 } 25692 _ = o0.Args[1] 25693 o1 := o0.Args[0] 25694 if o1.Op != OpARM64ORshiftLL { 25695 break 25696 } 25697 if o1.AuxInt != 16 { 25698 break 25699 } 25700 _ = o1.Args[1] 25701 o2 := o1.Args[0] 25702 if o2.Op != OpARM64ORshiftLL { 25703 break 25704 } 25705 if o2.AuxInt != 24 { 25706 break 25707 } 25708 _ = o2.Args[1] 25709 o3 := o2.Args[0] 25710 if o3.Op != OpARM64ORshiftLL { 25711 break 25712 } 25713 
if o3.AuxInt != 32 { 25714 break 25715 } 25716 _ = o3.Args[1] 25717 o4 := o3.Args[0] 25718 if o4.Op != OpARM64ORshiftLL { 25719 break 25720 } 25721 if o4.AuxInt != 40 { 25722 break 25723 } 25724 _ = o4.Args[1] 25725 o5 := o4.Args[0] 25726 if o5.Op != OpARM64ORshiftLL { 25727 break 25728 } 25729 if o5.AuxInt != 48 { 25730 break 25731 } 25732 _ = o5.Args[1] 25733 s0 := o5.Args[0] 25734 if s0.Op != OpARM64SLLconst { 25735 break 25736 } 25737 if s0.AuxInt != 56 { 25738 break 25739 } 25740 y0 := s0.Args[0] 25741 if y0.Op != OpARM64MOVDnop { 25742 break 25743 } 25744 x0 := y0.Args[0] 25745 if x0.Op != OpARM64MOVBUloadidx { 25746 break 25747 } 25748 _ = x0.Args[2] 25749 ptr := x0.Args[0] 25750 idx := x0.Args[1] 25751 mem := x0.Args[2] 25752 y1 := o5.Args[1] 25753 if y1.Op != OpARM64MOVDnop { 25754 break 25755 } 25756 x1 := y1.Args[0] 25757 if x1.Op != OpARM64MOVBUloadidx { 25758 break 25759 } 25760 _ = x1.Args[2] 25761 if ptr != x1.Args[0] { 25762 break 25763 } 25764 x1_1 := x1.Args[1] 25765 if x1_1.Op != OpARM64ADDconst { 25766 break 25767 } 25768 if x1_1.AuxInt != 1 { 25769 break 25770 } 25771 if idx != x1_1.Args[0] { 25772 break 25773 } 25774 if mem != x1.Args[2] { 25775 break 25776 } 25777 y2 := o4.Args[1] 25778 if y2.Op != OpARM64MOVDnop { 25779 break 25780 } 25781 x2 := y2.Args[0] 25782 if x2.Op != OpARM64MOVBUloadidx { 25783 break 25784 } 25785 _ = x2.Args[2] 25786 if ptr != x2.Args[0] { 25787 break 25788 } 25789 x2_1 := x2.Args[1] 25790 if x2_1.Op != OpARM64ADDconst { 25791 break 25792 } 25793 if x2_1.AuxInt != 2 { 25794 break 25795 } 25796 if idx != x2_1.Args[0] { 25797 break 25798 } 25799 if mem != x2.Args[2] { 25800 break 25801 } 25802 y3 := o3.Args[1] 25803 if y3.Op != OpARM64MOVDnop { 25804 break 25805 } 25806 x3 := y3.Args[0] 25807 if x3.Op != OpARM64MOVBUloadidx { 25808 break 25809 } 25810 _ = x3.Args[2] 25811 if ptr != x3.Args[0] { 25812 break 25813 } 25814 x3_1 := x3.Args[1] 25815 if x3_1.Op != OpARM64ADDconst { 25816 break 25817 } 25818 if x3_1.AuxInt != 
3 { 25819 break 25820 } 25821 if idx != x3_1.Args[0] { 25822 break 25823 } 25824 if mem != x3.Args[2] { 25825 break 25826 } 25827 y4 := o2.Args[1] 25828 if y4.Op != OpARM64MOVDnop { 25829 break 25830 } 25831 x4 := y4.Args[0] 25832 if x4.Op != OpARM64MOVBUloadidx { 25833 break 25834 } 25835 _ = x4.Args[2] 25836 if ptr != x4.Args[0] { 25837 break 25838 } 25839 x4_1 := x4.Args[1] 25840 if x4_1.Op != OpARM64ADDconst { 25841 break 25842 } 25843 if x4_1.AuxInt != 4 { 25844 break 25845 } 25846 if idx != x4_1.Args[0] { 25847 break 25848 } 25849 if mem != x4.Args[2] { 25850 break 25851 } 25852 y5 := o1.Args[1] 25853 if y5.Op != OpARM64MOVDnop { 25854 break 25855 } 25856 x5 := y5.Args[0] 25857 if x5.Op != OpARM64MOVBUloadidx { 25858 break 25859 } 25860 _ = x5.Args[2] 25861 if ptr != x5.Args[0] { 25862 break 25863 } 25864 x5_1 := x5.Args[1] 25865 if x5_1.Op != OpARM64ADDconst { 25866 break 25867 } 25868 if x5_1.AuxInt != 5 { 25869 break 25870 } 25871 if idx != x5_1.Args[0] { 25872 break 25873 } 25874 if mem != x5.Args[2] { 25875 break 25876 } 25877 y6 := o0.Args[1] 25878 if y6.Op != OpARM64MOVDnop { 25879 break 25880 } 25881 x6 := y6.Args[0] 25882 if x6.Op != OpARM64MOVBUloadidx { 25883 break 25884 } 25885 _ = x6.Args[2] 25886 if ptr != x6.Args[0] { 25887 break 25888 } 25889 x6_1 := x6.Args[1] 25890 if x6_1.Op != OpARM64ADDconst { 25891 break 25892 } 25893 if x6_1.AuxInt != 6 { 25894 break 25895 } 25896 if idx != x6_1.Args[0] { 25897 break 25898 } 25899 if mem != x6.Args[2] { 25900 break 25901 } 25902 y7 := v.Args[1] 25903 if y7.Op != OpARM64MOVDnop { 25904 break 25905 } 25906 x7 := y7.Args[0] 25907 if x7.Op != OpARM64MOVBUloadidx { 25908 break 25909 } 25910 _ = x7.Args[2] 25911 if ptr != x7.Args[0] { 25912 break 25913 } 25914 x7_1 := x7.Args[1] 25915 if x7_1.Op != OpARM64ADDconst { 25916 break 25917 } 25918 if x7_1.AuxInt != 7 { 25919 break 25920 } 25921 if idx != x7_1.Args[0] { 25922 break 25923 } 25924 if mem != x7.Args[2] { 25925 break 25926 } 25927 if !(x0.Uses == 1 && 
x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 25928 break 25929 } 25930 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 25931 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 25932 v.reset(OpCopy) 25933 v.AddArg(v0) 25934 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 25935 v1.AddArg(ptr) 25936 v1.AddArg(idx) 25937 v1.AddArg(mem) 25938 v0.AddArg(v1) 25939 return true 25940 } 25941 // match: (OR <t> y7:(MOVDnop x7:(MOVBUloadidx ptr (ADDconst [7] idx) mem)) o0:(ORshiftLL [8] o1:(ORshiftLL [16] o2:(ORshiftLL [24] o3:(ORshiftLL [32] o4:(ORshiftLL [40] o5:(ORshiftLL [48] s0:(SLLconst [56] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem))) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y5:(MOVDnop x5:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y6:(MOVDnop x6:(MOVBUloadidx ptr (ADDconst [6] idx) mem)))) 25942 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && 
o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0) 25943 // result: @mergePoint(b,x0,x1,x2,x3,x4,x5,x6,x7) (REV <t> (MOVDloadidx <t> ptr idx mem)) 25944 for { 25945 t := v.Type 25946 _ = v.Args[1] 25947 y7 := v.Args[0] 25948 if y7.Op != OpARM64MOVDnop { 25949 break 25950 } 25951 x7 := y7.Args[0] 25952 if x7.Op != OpARM64MOVBUloadidx { 25953 break 25954 } 25955 _ = x7.Args[2] 25956 ptr := x7.Args[0] 25957 x7_1 := x7.Args[1] 25958 if x7_1.Op != OpARM64ADDconst { 25959 break 25960 } 25961 if x7_1.AuxInt != 7 { 25962 break 25963 } 25964 idx := x7_1.Args[0] 25965 mem := x7.Args[2] 25966 o0 := v.Args[1] 25967 if o0.Op != OpARM64ORshiftLL { 25968 break 25969 } 25970 if o0.AuxInt != 8 { 25971 break 25972 } 25973 _ = o0.Args[1] 25974 o1 := o0.Args[0] 25975 if o1.Op != OpARM64ORshiftLL { 25976 break 25977 } 25978 if o1.AuxInt != 16 { 25979 break 25980 } 25981 _ = o1.Args[1] 25982 o2 := o1.Args[0] 25983 if o2.Op != OpARM64ORshiftLL { 25984 break 25985 } 25986 if o2.AuxInt != 24 { 25987 break 25988 } 25989 _ = o2.Args[1] 25990 o3 := o2.Args[0] 25991 if o3.Op != OpARM64ORshiftLL { 25992 break 25993 } 25994 if o3.AuxInt != 32 { 25995 break 25996 } 25997 _ = o3.Args[1] 25998 o4 := o3.Args[0] 25999 if o4.Op != OpARM64ORshiftLL { 26000 break 26001 } 26002 if o4.AuxInt != 40 { 26003 break 26004 } 26005 _ = o4.Args[1] 26006 o5 := o4.Args[0] 26007 if o5.Op != OpARM64ORshiftLL { 26008 break 26009 } 26010 if o5.AuxInt != 48 { 26011 break 26012 } 26013 _ = o5.Args[1] 26014 s0 := o5.Args[0] 26015 if s0.Op != OpARM64SLLconst { 26016 break 26017 } 26018 if s0.AuxInt != 56 { 26019 
break 26020 } 26021 y0 := s0.Args[0] 26022 if y0.Op != OpARM64MOVDnop { 26023 break 26024 } 26025 x0 := y0.Args[0] 26026 if x0.Op != OpARM64MOVBUloadidx { 26027 break 26028 } 26029 _ = x0.Args[2] 26030 if ptr != x0.Args[0] { 26031 break 26032 } 26033 if idx != x0.Args[1] { 26034 break 26035 } 26036 if mem != x0.Args[2] { 26037 break 26038 } 26039 y1 := o5.Args[1] 26040 if y1.Op != OpARM64MOVDnop { 26041 break 26042 } 26043 x1 := y1.Args[0] 26044 if x1.Op != OpARM64MOVBUloadidx { 26045 break 26046 } 26047 _ = x1.Args[2] 26048 if ptr != x1.Args[0] { 26049 break 26050 } 26051 x1_1 := x1.Args[1] 26052 if x1_1.Op != OpARM64ADDconst { 26053 break 26054 } 26055 if x1_1.AuxInt != 1 { 26056 break 26057 } 26058 if idx != x1_1.Args[0] { 26059 break 26060 } 26061 if mem != x1.Args[2] { 26062 break 26063 } 26064 y2 := o4.Args[1] 26065 if y2.Op != OpARM64MOVDnop { 26066 break 26067 } 26068 x2 := y2.Args[0] 26069 if x2.Op != OpARM64MOVBUloadidx { 26070 break 26071 } 26072 _ = x2.Args[2] 26073 if ptr != x2.Args[0] { 26074 break 26075 } 26076 x2_1 := x2.Args[1] 26077 if x2_1.Op != OpARM64ADDconst { 26078 break 26079 } 26080 if x2_1.AuxInt != 2 { 26081 break 26082 } 26083 if idx != x2_1.Args[0] { 26084 break 26085 } 26086 if mem != x2.Args[2] { 26087 break 26088 } 26089 y3 := o3.Args[1] 26090 if y3.Op != OpARM64MOVDnop { 26091 break 26092 } 26093 x3 := y3.Args[0] 26094 if x3.Op != OpARM64MOVBUloadidx { 26095 break 26096 } 26097 _ = x3.Args[2] 26098 if ptr != x3.Args[0] { 26099 break 26100 } 26101 x3_1 := x3.Args[1] 26102 if x3_1.Op != OpARM64ADDconst { 26103 break 26104 } 26105 if x3_1.AuxInt != 3 { 26106 break 26107 } 26108 if idx != x3_1.Args[0] { 26109 break 26110 } 26111 if mem != x3.Args[2] { 26112 break 26113 } 26114 y4 := o2.Args[1] 26115 if y4.Op != OpARM64MOVDnop { 26116 break 26117 } 26118 x4 := y4.Args[0] 26119 if x4.Op != OpARM64MOVBUloadidx { 26120 break 26121 } 26122 _ = x4.Args[2] 26123 if ptr != x4.Args[0] { 26124 break 26125 } 26126 x4_1 := x4.Args[1] 26127 if 
x4_1.Op != OpARM64ADDconst { 26128 break 26129 } 26130 if x4_1.AuxInt != 4 { 26131 break 26132 } 26133 if idx != x4_1.Args[0] { 26134 break 26135 } 26136 if mem != x4.Args[2] { 26137 break 26138 } 26139 y5 := o1.Args[1] 26140 if y5.Op != OpARM64MOVDnop { 26141 break 26142 } 26143 x5 := y5.Args[0] 26144 if x5.Op != OpARM64MOVBUloadidx { 26145 break 26146 } 26147 _ = x5.Args[2] 26148 if ptr != x5.Args[0] { 26149 break 26150 } 26151 x5_1 := x5.Args[1] 26152 if x5_1.Op != OpARM64ADDconst { 26153 break 26154 } 26155 if x5_1.AuxInt != 5 { 26156 break 26157 } 26158 if idx != x5_1.Args[0] { 26159 break 26160 } 26161 if mem != x5.Args[2] { 26162 break 26163 } 26164 y6 := o0.Args[1] 26165 if y6.Op != OpARM64MOVDnop { 26166 break 26167 } 26168 x6 := y6.Args[0] 26169 if x6.Op != OpARM64MOVBUloadidx { 26170 break 26171 } 26172 _ = x6.Args[2] 26173 if ptr != x6.Args[0] { 26174 break 26175 } 26176 x6_1 := x6.Args[1] 26177 if x6_1.Op != OpARM64ADDconst { 26178 break 26179 } 26180 if x6_1.AuxInt != 6 { 26181 break 26182 } 26183 if idx != x6_1.Args[0] { 26184 break 26185 } 26186 if mem != x6.Args[2] { 26187 break 26188 } 26189 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && x7.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && y5.Uses == 1 && y6.Uses == 1 && y7.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && o3.Uses == 1 && o4.Uses == 1 && o5.Uses == 1 && s0.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6) && clobber(x7) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(y5) && clobber(y6) && clobber(y7) && clobber(o0) && clobber(o1) && clobber(o2) && clobber(o3) && clobber(o4) && clobber(o5) && clobber(s0)) { 26190 break 26191 } 26192 b = mergePoint(b, x0, x1, x2, x3, x4, x5, x6, x7) 26193 v0 := 
b.NewValue0(v.Pos, OpARM64REV, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t)
		v1.AddArg(ptr)
		v1.AddArg(idx)
		v1.AddArg(mem)
		v0.AddArg(v1)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORN_0 rewrites ORN (or-not): a constant second
// operand folds into ORconst with the complemented constant, (ORN x x)
// becomes the all-ones constant, and a single-use shifted second operand
// fuses into the ORNshiftLL/ORNshiftRL/ORNshiftRA forms.
// NOTE: generated from gen/ARM64.rules — do not edit by hand.
func rewriteValueARM64_OpARM64ORN_0(v *Value) bool {
	// match: (ORN x (MOVDconst [c]))
	// cond:
	// result: (ORconst [^c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (ORN x x)
	// cond:
	// result: (MOVDconst [-1])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ORN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ORN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ORNshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ORNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftLL_0 rewrites ORNshiftLL: a constant
// second operand folds to ORconst of the complemented shifted constant, and
// (ORNshiftLL x (SLLconst x [c]) [d]) with c==d is x|^x, i.e. all ones.
func rewriteValueARM64_OpARM64ORNshiftLL_0(v *Value) bool {
	// match: (ORNshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftRA_0 rewrites ORNshiftRA: constant
// operands fold (arithmetic shift, then complement), and the self-or-not
// pattern with matching shift amounts yields all ones.
func rewriteValueARM64_OpARM64ORNshiftRA_0(v *Value) bool {
	// match: (ORNshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst x [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = ^(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORNshiftRL_0 rewrites ORNshiftRL: constant
// operands fold (logical shift, then complement), and the self-or-not
// pattern with matching shift amounts yields all ones.
func rewriteValueARM64_OpARM64ORNshiftRL_0(v *Value) bool {
	// match: (ORNshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORNshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORconst_0 rewrites ORconst: 0 is the identity,
// -1 absorbs, constants fold, nested ORconsts merge, and an ANDconst whose
// mask is fully covered by the OR constant (c2|c1 == ^0) is redundant.
func rewriteValueARM64_OpARM64ORconst_0(v *Value) bool {
	// match: (ORconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVDconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond:
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	// match: (ORconst [c1] (ANDconst [c2] x))
	// cond: c2|c1 == ^0
	// result: (ORconst [c1] x)
	for {
		c1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c2 := v_0.AuxInt
		x := v_0.Args[0]
		if !(c2|c1 == ^0) {
			break
		}
		v.reset(OpARM64ORconst)
		v.AuxInt = c1
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORshiftLL_0 rewrites ORshiftLL: constant folding,
// recognition of rotates (RORconst/RORWconst), extracts
// (EXTRconst/EXTRWconst), bitfield insertion (BFXIL), and merging of two
// adjacent single-use byte loads into one MOVHUload/MOVHUloadidx.
func rewriteValueARM64_OpARM64ORshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ORshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ORconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ORconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ORshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ORconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ORconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (ORshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL <t> [c] (UBFX [bfc] x) x)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL [c] (SRLconst x [64-c]) x2)
	// cond:
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v.Args[1]
		v.reset(OpARM64EXTRconst)
		v.AuxInt = 64 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		x2 := v.Args[1]
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = 32 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL [sc] (UBFX [bfc] x) (SRLconst [sc] y))
	// cond: sc == getARM64BFwidth(bfc)
	// result: (BFXIL [bfc] y x)
	for {
		sc := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != sc {
			break
		}
		y := v_1.Args[0]
		if !(sc == getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64BFXIL)
		v.AuxInt = bfc
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i0] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem)))
	// cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
	// result: @mergePoint(b,x0,x1) (MOVHUload <t> {s} (OffPtr <p.Type> [i0] p) mem)
	for {
		t := v.Type
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[1]
		y0 := v.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUload {
			break
		}
		i0 := x0.AuxInt
		s := x0.Aux
		_ = x0.Args[1]
		p := x0.Args[0]
		mem := x0.Args[1]
		y1 := v.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		i1 := x1.AuxInt
		if x1.Aux != s {
			break
		}
		_ = x1.Args[1]
		if p != x1.Args[0] {
			break
		}
		if mem != x1.Args[1] {
			break
		}
		if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
			break
		}
		b = mergePoint(b, x0, x1)
		v0 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.Aux = s
		v1 := b.NewValue0(x1.Pos, OpOffPtr, p.Type)
		v1.AuxInt = i0
		v1.AddArg(p)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr0 idx0 mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)))
	// cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
	// result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr0 idx0 mem)
	for {
		t := v.Type
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[1]
		y0 := v.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x0.Args[2]
		ptr0 := x0.Args[0]
		idx0 := x0.Args[1]
		mem := x0.Args[2]
		y1 := v.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUload {
			break
		}
		if x1.AuxInt != 1 {
			break
		}
		s := x1.Aux
		_ = x1.Args[1]
		p1 := x1.Args[0]
		if p1.Op != OpARM64ADD {
			break
		}
		_ = p1.Args[1]
		ptr1 := p1.Args[0]
		idx1 := p1.Args[1]
		if mem != x1.Args[1] {
			break
		}
		if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
			break
		}
		b = mergePoint(b, x0, x1)
		v0 := b.NewValue0(x1.Pos, OpARM64MOVHUloadidx, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AddArg(ptr0)
		v0.AddArg(idx0)
		v0.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64ORshiftLL_10 continues the ORshiftLL load-merge
// rules (the function body continues beyond this chunk).
func rewriteValueARM64_OpARM64ORshiftLL_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr idx mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem)))
	// cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)
	// result: @mergePoint(b,x0,x1) (MOVHUloadidx <t> ptr idx mem)
	for {
		t := v.Type
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[1]
		y0 := v.Args[0]
		if y0.Op != OpARM64MOVDnop {
			break
		}
		x0 := y0.Args[0]
		if x0.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x0.Args[2]
		ptr := x0.Args[0]
		idx := x0.Args[1]
		mem := x0.Args[2]
		y1 := v.Args[1]
		if y1.Op != OpARM64MOVDnop {
			break
		}
		x1 := y1.Args[0]
		if x1.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x1.Args[2]
		if ptr != x1.Args[0] {
			break
		}
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64ADDconst {
			break
		}
		if x1_1.AuxInt != 1 {
			break
		}
		if idx != x1_1.Args[0] {
			break
		}
		if mem != x1.Args[2] {
			break
		}
		if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) {
			break
		}
		b = mergePoint(b, x0, x1)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t)
		v.reset(OpCopy)
v.AddArg(v0) 26854 v0.AddArg(ptr) 26855 v0.AddArg(idx) 26856 v0.AddArg(mem) 26857 return true 26858 } 26859 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i2] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i3] {s} p mem))) 26860 // cond: i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 26861 // result: @mergePoint(b,x0,x1,x2) (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem) 26862 for { 26863 t := v.Type 26864 if v.AuxInt != 24 { 26865 break 26866 } 26867 _ = v.Args[1] 26868 o0 := v.Args[0] 26869 if o0.Op != OpARM64ORshiftLL { 26870 break 26871 } 26872 if o0.AuxInt != 16 { 26873 break 26874 } 26875 _ = o0.Args[1] 26876 x0 := o0.Args[0] 26877 if x0.Op != OpARM64MOVHUload { 26878 break 26879 } 26880 i0 := x0.AuxInt 26881 s := x0.Aux 26882 _ = x0.Args[1] 26883 p := x0.Args[0] 26884 mem := x0.Args[1] 26885 y1 := o0.Args[1] 26886 if y1.Op != OpARM64MOVDnop { 26887 break 26888 } 26889 x1 := y1.Args[0] 26890 if x1.Op != OpARM64MOVBUload { 26891 break 26892 } 26893 i2 := x1.AuxInt 26894 if x1.Aux != s { 26895 break 26896 } 26897 _ = x1.Args[1] 26898 if p != x1.Args[0] { 26899 break 26900 } 26901 if mem != x1.Args[1] { 26902 break 26903 } 26904 y2 := v.Args[1] 26905 if y2.Op != OpARM64MOVDnop { 26906 break 26907 } 26908 x2 := y2.Args[0] 26909 if x2.Op != OpARM64MOVBUload { 26910 break 26911 } 26912 i3 := x2.AuxInt 26913 if x2.Aux != s { 26914 break 26915 } 26916 _ = x2.Args[1] 26917 if p != x2.Args[0] { 26918 break 26919 } 26920 if mem != x2.Args[1] { 26921 break 26922 } 26923 if !(i2 == i0+2 && i3 == i0+3 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 26924 
break 26925 } 26926 b = mergePoint(b, x0, x1, x2) 26927 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 26928 v.reset(OpCopy) 26929 v.AddArg(v0) 26930 v0.Aux = s 26931 v1 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 26932 v1.AuxInt = i0 26933 v1.AddArg(p) 26934 v0.AddArg(v1) 26935 v0.AddArg(mem) 26936 return true 26937 } 26938 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 26939 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 26940 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 idx0 mem) 26941 for { 26942 t := v.Type 26943 if v.AuxInt != 24 { 26944 break 26945 } 26946 _ = v.Args[1] 26947 o0 := v.Args[0] 26948 if o0.Op != OpARM64ORshiftLL { 26949 break 26950 } 26951 if o0.AuxInt != 16 { 26952 break 26953 } 26954 _ = o0.Args[1] 26955 x0 := o0.Args[0] 26956 if x0.Op != OpARM64MOVHUloadidx { 26957 break 26958 } 26959 _ = x0.Args[2] 26960 ptr0 := x0.Args[0] 26961 idx0 := x0.Args[1] 26962 mem := x0.Args[2] 26963 y1 := o0.Args[1] 26964 if y1.Op != OpARM64MOVDnop { 26965 break 26966 } 26967 x1 := y1.Args[0] 26968 if x1.Op != OpARM64MOVBUload { 26969 break 26970 } 26971 if x1.AuxInt != 2 { 26972 break 26973 } 26974 s := x1.Aux 26975 _ = x1.Args[1] 26976 p1 := x1.Args[0] 26977 if p1.Op != OpARM64ADD { 26978 break 26979 } 26980 _ = p1.Args[1] 26981 ptr1 := p1.Args[0] 26982 idx1 := p1.Args[1] 26983 if mem != x1.Args[1] { 26984 break 26985 } 26986 y2 := v.Args[1] 26987 if y2.Op != OpARM64MOVDnop { 26988 break 26989 } 26990 x2 := y2.Args[0] 26991 if x2.Op != OpARM64MOVBUload { 26992 break 26993 } 26994 if x2.AuxInt != 3 { 26995 break 
26996 } 26997 if x2.Aux != s { 26998 break 26999 } 27000 _ = x2.Args[1] 27001 p := x2.Args[0] 27002 if mem != x2.Args[1] { 27003 break 27004 } 27005 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27006 break 27007 } 27008 b = mergePoint(b, x0, x1, x2) 27009 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 27010 v.reset(OpCopy) 27011 v.AddArg(v0) 27012 v0.AddArg(ptr0) 27013 v0.AddArg(idx0) 27014 v0.AddArg(mem) 27015 return true 27016 } 27017 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) 27018 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 27019 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr idx mem) 27020 for { 27021 t := v.Type 27022 if v.AuxInt != 24 { 27023 break 27024 } 27025 _ = v.Args[1] 27026 o0 := v.Args[0] 27027 if o0.Op != OpARM64ORshiftLL { 27028 break 27029 } 27030 if o0.AuxInt != 16 { 27031 break 27032 } 27033 _ = o0.Args[1] 27034 x0 := o0.Args[0] 27035 if x0.Op != OpARM64MOVHUloadidx { 27036 break 27037 } 27038 _ = x0.Args[2] 27039 ptr := x0.Args[0] 27040 idx := x0.Args[1] 27041 mem := x0.Args[2] 27042 y1 := o0.Args[1] 27043 if y1.Op != OpARM64MOVDnop { 27044 break 27045 } 27046 x1 := y1.Args[0] 27047 if x1.Op != OpARM64MOVBUloadidx { 27048 break 27049 } 27050 _ = x1.Args[2] 27051 if ptr != x1.Args[0] { 27052 break 27053 } 27054 x1_1 := x1.Args[1] 27055 if x1_1.Op != OpARM64ADDconst { 27056 break 27057 } 27058 
if x1_1.AuxInt != 2 { 27059 break 27060 } 27061 if idx != x1_1.Args[0] { 27062 break 27063 } 27064 if mem != x1.Args[2] { 27065 break 27066 } 27067 y2 := v.Args[1] 27068 if y2.Op != OpARM64MOVDnop { 27069 break 27070 } 27071 x2 := y2.Args[0] 27072 if x2.Op != OpARM64MOVBUloadidx { 27073 break 27074 } 27075 _ = x2.Args[2] 27076 if ptr != x2.Args[0] { 27077 break 27078 } 27079 x2_1 := x2.Args[1] 27080 if x2_1.Op != OpARM64ADDconst { 27081 break 27082 } 27083 if x2_1.AuxInt != 3 { 27084 break 27085 } 27086 if idx != x2_1.Args[0] { 27087 break 27088 } 27089 if mem != x2.Args[2] { 27090 break 27091 } 27092 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27093 break 27094 } 27095 b = mergePoint(b, x0, x1, x2) 27096 v0 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 27097 v.reset(OpCopy) 27098 v.AddArg(v0) 27099 v0.AddArg(ptr) 27100 v0.AddArg(idx) 27101 v0.AddArg(mem) 27102 return true 27103 } 27104 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] x0:(MOVHUloadidx2 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [2] {s} p1:(ADDshiftLL [1] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [3] {s} p mem))) 27105 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0) 27106 // result: @mergePoint(b,x0,x1,x2) (MOVWUloadidx <t> ptr0 (SLLconst <idx0.Type> [1] idx0) mem) 27107 for { 27108 t := v.Type 27109 if v.AuxInt != 24 { 27110 break 27111 } 27112 _ = v.Args[1] 27113 o0 := v.Args[0] 27114 if o0.Op != OpARM64ORshiftLL { 27115 break 27116 } 27117 if o0.AuxInt != 16 { 27118 break 27119 } 27120 _ = o0.Args[1] 27121 x0 := o0.Args[0] 27122 if x0.Op != OpARM64MOVHUloadidx2 { 
27123 break 27124 } 27125 _ = x0.Args[2] 27126 ptr0 := x0.Args[0] 27127 idx0 := x0.Args[1] 27128 mem := x0.Args[2] 27129 y1 := o0.Args[1] 27130 if y1.Op != OpARM64MOVDnop { 27131 break 27132 } 27133 x1 := y1.Args[0] 27134 if x1.Op != OpARM64MOVBUload { 27135 break 27136 } 27137 if x1.AuxInt != 2 { 27138 break 27139 } 27140 s := x1.Aux 27141 _ = x1.Args[1] 27142 p1 := x1.Args[0] 27143 if p1.Op != OpARM64ADDshiftLL { 27144 break 27145 } 27146 if p1.AuxInt != 1 { 27147 break 27148 } 27149 _ = p1.Args[1] 27150 ptr1 := p1.Args[0] 27151 idx1 := p1.Args[1] 27152 if mem != x1.Args[1] { 27153 break 27154 } 27155 y2 := v.Args[1] 27156 if y2.Op != OpARM64MOVDnop { 27157 break 27158 } 27159 x2 := y2.Args[0] 27160 if x2.Op != OpARM64MOVBUload { 27161 break 27162 } 27163 if x2.AuxInt != 3 { 27164 break 27165 } 27166 if x2.Aux != s { 27167 break 27168 } 27169 _ = x2.Args[1] 27170 p := x2.Args[0] 27171 if mem != x2.Args[1] { 27172 break 27173 } 27174 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y1) && clobber(y2) && clobber(o0)) { 27175 break 27176 } 27177 b = mergePoint(b, x0, x1, x2) 27178 v0 := b.NewValue0(x2.Pos, OpARM64MOVWUloadidx, t) 27179 v.reset(OpCopy) 27180 v.AddArg(v0) 27181 v0.AddArg(ptr0) 27182 v1 := b.NewValue0(x2.Pos, OpARM64SLLconst, idx0.Type) 27183 v1.AuxInt = 1 27184 v1.AddArg(idx0) 27185 v0.AddArg(v1) 27186 v0.AddArg(mem) 27187 return true 27188 } 27189 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUload [i0] {s} p mem) y1:(MOVDnop x1:(MOVBUload [i4] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i7] {s} p mem))) 27190 // cond: i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 
1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27191 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem) 27192 for { 27193 t := v.Type 27194 if v.AuxInt != 56 { 27195 break 27196 } 27197 _ = v.Args[1] 27198 o0 := v.Args[0] 27199 if o0.Op != OpARM64ORshiftLL { 27200 break 27201 } 27202 if o0.AuxInt != 48 { 27203 break 27204 } 27205 _ = o0.Args[1] 27206 o1 := o0.Args[0] 27207 if o1.Op != OpARM64ORshiftLL { 27208 break 27209 } 27210 if o1.AuxInt != 40 { 27211 break 27212 } 27213 _ = o1.Args[1] 27214 o2 := o1.Args[0] 27215 if o2.Op != OpARM64ORshiftLL { 27216 break 27217 } 27218 if o2.AuxInt != 32 { 27219 break 27220 } 27221 _ = o2.Args[1] 27222 x0 := o2.Args[0] 27223 if x0.Op != OpARM64MOVWUload { 27224 break 27225 } 27226 i0 := x0.AuxInt 27227 s := x0.Aux 27228 _ = x0.Args[1] 27229 p := x0.Args[0] 27230 mem := x0.Args[1] 27231 y1 := o2.Args[1] 27232 if y1.Op != OpARM64MOVDnop { 27233 break 27234 } 27235 x1 := y1.Args[0] 27236 if x1.Op != OpARM64MOVBUload { 27237 break 27238 } 27239 i4 := x1.AuxInt 27240 if x1.Aux != s { 27241 break 27242 } 27243 _ = x1.Args[1] 27244 if p != x1.Args[0] { 27245 break 27246 } 27247 if mem != x1.Args[1] { 27248 break 27249 } 27250 y2 := o1.Args[1] 27251 if y2.Op != OpARM64MOVDnop { 27252 break 27253 } 27254 x2 := y2.Args[0] 27255 if x2.Op != OpARM64MOVBUload { 27256 break 27257 } 27258 i5 := x2.AuxInt 27259 if x2.Aux != s { 27260 break 27261 } 27262 _ = x2.Args[1] 27263 if p != x2.Args[0] { 27264 break 27265 } 27266 if mem != x2.Args[1] { 27267 break 27268 } 27269 y3 := o0.Args[1] 27270 if y3.Op != OpARM64MOVDnop { 27271 break 27272 } 27273 x3 := y3.Args[0] 27274 if x3.Op 
!= OpARM64MOVBUload { 27275 break 27276 } 27277 i6 := x3.AuxInt 27278 if x3.Aux != s { 27279 break 27280 } 27281 _ = x3.Args[1] 27282 if p != x3.Args[0] { 27283 break 27284 } 27285 if mem != x3.Args[1] { 27286 break 27287 } 27288 y4 := v.Args[1] 27289 if y4.Op != OpARM64MOVDnop { 27290 break 27291 } 27292 x4 := y4.Args[0] 27293 if x4.Op != OpARM64MOVBUload { 27294 break 27295 } 27296 i7 := x4.AuxInt 27297 if x4.Aux != s { 27298 break 27299 } 27300 _ = x4.Args[1] 27301 if p != x4.Args[0] { 27302 break 27303 } 27304 if mem != x4.Args[1] { 27305 break 27306 } 27307 if !(i4 == i0+4 && i5 == i0+5 && i6 == i0+6 && i7 == i0+7 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27308 break 27309 } 27310 b = mergePoint(b, x0, x1, x2, x3, x4) 27311 v0 := b.NewValue0(x4.Pos, OpARM64MOVDload, t) 27312 v.reset(OpCopy) 27313 v.AddArg(v0) 27314 v0.Aux = s 27315 v1 := b.NewValue0(x4.Pos, OpOffPtr, p.Type) 27316 v1.AuxInt = i0 27317 v1.AddArg(p) 27318 v0.AddArg(v1) 27319 v0.AddArg(mem) 27320 return true 27321 } 27322 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 27323 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || 
isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27324 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 idx0 mem) 27325 for { 27326 t := v.Type 27327 if v.AuxInt != 56 { 27328 break 27329 } 27330 _ = v.Args[1] 27331 o0 := v.Args[0] 27332 if o0.Op != OpARM64ORshiftLL { 27333 break 27334 } 27335 if o0.AuxInt != 48 { 27336 break 27337 } 27338 _ = o0.Args[1] 27339 o1 := o0.Args[0] 27340 if o1.Op != OpARM64ORshiftLL { 27341 break 27342 } 27343 if o1.AuxInt != 40 { 27344 break 27345 } 27346 _ = o1.Args[1] 27347 o2 := o1.Args[0] 27348 if o2.Op != OpARM64ORshiftLL { 27349 break 27350 } 27351 if o2.AuxInt != 32 { 27352 break 27353 } 27354 _ = o2.Args[1] 27355 x0 := o2.Args[0] 27356 if x0.Op != OpARM64MOVWUloadidx { 27357 break 27358 } 27359 _ = x0.Args[2] 27360 ptr0 := x0.Args[0] 27361 idx0 := x0.Args[1] 27362 mem := x0.Args[2] 27363 y1 := o2.Args[1] 27364 if y1.Op != OpARM64MOVDnop { 27365 break 27366 } 27367 x1 := y1.Args[0] 27368 if x1.Op != OpARM64MOVBUload { 27369 break 27370 } 27371 if x1.AuxInt != 4 { 27372 break 27373 } 27374 s := x1.Aux 27375 _ = x1.Args[1] 27376 p1 := x1.Args[0] 27377 if p1.Op != OpARM64ADD { 27378 break 27379 } 27380 _ = p1.Args[1] 27381 ptr1 := p1.Args[0] 27382 idx1 := p1.Args[1] 27383 if mem != x1.Args[1] { 27384 break 27385 } 27386 y2 := o1.Args[1] 27387 if y2.Op != OpARM64MOVDnop { 27388 break 27389 } 27390 x2 := y2.Args[0] 27391 if x2.Op != OpARM64MOVBUload { 27392 break 27393 } 27394 if x2.AuxInt != 5 { 27395 break 27396 } 27397 if x2.Aux != s { 27398 break 27399 } 27400 _ = x2.Args[1] 27401 p := x2.Args[0] 27402 if mem != x2.Args[1] { 27403 break 27404 } 27405 y3 := o0.Args[1] 27406 if y3.Op != OpARM64MOVDnop { 27407 break 27408 } 27409 x3 := y3.Args[0] 27410 if x3.Op != OpARM64MOVBUload { 27411 break 27412 } 27413 if x3.AuxInt != 
6 { 27414 break 27415 } 27416 if x3.Aux != s { 27417 break 27418 } 27419 _ = x3.Args[1] 27420 if p != x3.Args[0] { 27421 break 27422 } 27423 if mem != x3.Args[1] { 27424 break 27425 } 27426 y4 := v.Args[1] 27427 if y4.Op != OpARM64MOVDnop { 27428 break 27429 } 27430 x4 := y4.Args[0] 27431 if x4.Op != OpARM64MOVBUload { 27432 break 27433 } 27434 if x4.AuxInt != 7 { 27435 break 27436 } 27437 if x4.Aux != s { 27438 break 27439 } 27440 _ = x4.Args[1] 27441 if p != x4.Args[0] { 27442 break 27443 } 27444 if mem != x4.Args[1] { 27445 break 27446 } 27447 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27448 break 27449 } 27450 b = mergePoint(b, x0, x1, x2, x3, x4) 27451 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t) 27452 v.reset(OpCopy) 27453 v.AddArg(v0) 27454 v0.AddArg(ptr0) 27455 v0.AddArg(idx0) 27456 v0.AddArg(mem) 27457 return true 27458 } 27459 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx4 ptr0 idx0 mem) y1:(MOVDnop x1:(MOVBUload [4] {s} p1:(ADDshiftLL [2] ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUload [5] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [6] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [7] {s} p mem))) 27460 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && 
isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27461 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr0 (SLLconst <idx0.Type> [2] idx0) mem) 27462 for { 27463 t := v.Type 27464 if v.AuxInt != 56 { 27465 break 27466 } 27467 _ = v.Args[1] 27468 o0 := v.Args[0] 27469 if o0.Op != OpARM64ORshiftLL { 27470 break 27471 } 27472 if o0.AuxInt != 48 { 27473 break 27474 } 27475 _ = o0.Args[1] 27476 o1 := o0.Args[0] 27477 if o1.Op != OpARM64ORshiftLL { 27478 break 27479 } 27480 if o1.AuxInt != 40 { 27481 break 27482 } 27483 _ = o1.Args[1] 27484 o2 := o1.Args[0] 27485 if o2.Op != OpARM64ORshiftLL { 27486 break 27487 } 27488 if o2.AuxInt != 32 { 27489 break 27490 } 27491 _ = o2.Args[1] 27492 x0 := o2.Args[0] 27493 if x0.Op != OpARM64MOVWUloadidx4 { 27494 break 27495 } 27496 _ = x0.Args[2] 27497 ptr0 := x0.Args[0] 27498 idx0 := x0.Args[1] 27499 mem := x0.Args[2] 27500 y1 := o2.Args[1] 27501 if y1.Op != OpARM64MOVDnop { 27502 break 27503 } 27504 x1 := y1.Args[0] 27505 if x1.Op != OpARM64MOVBUload { 27506 break 27507 } 27508 if x1.AuxInt != 4 { 27509 break 27510 } 27511 s := x1.Aux 27512 _ = x1.Args[1] 27513 p1 := x1.Args[0] 27514 if p1.Op != OpARM64ADDshiftLL { 27515 break 27516 } 27517 if p1.AuxInt != 2 { 27518 break 27519 } 27520 _ = p1.Args[1] 27521 ptr1 := p1.Args[0] 27522 idx1 := p1.Args[1] 27523 if mem != x1.Args[1] { 27524 break 27525 } 27526 y2 := o1.Args[1] 27527 if y2.Op != OpARM64MOVDnop { 27528 break 27529 } 27530 x2 := y2.Args[0] 27531 if x2.Op != OpARM64MOVBUload { 27532 break 27533 } 27534 if x2.AuxInt != 5 { 27535 break 27536 } 27537 if x2.Aux != s { 27538 break 27539 } 27540 _ = x2.Args[1] 27541 p := x2.Args[0] 27542 if mem != x2.Args[1] { 27543 break 27544 } 27545 y3 := o0.Args[1] 27546 if y3.Op != OpARM64MOVDnop { 27547 break 27548 } 27549 x3 := y3.Args[0] 27550 if x3.Op != OpARM64MOVBUload { 27551 break 
27552 } 27553 if x3.AuxInt != 6 { 27554 break 27555 } 27556 if x3.Aux != s { 27557 break 27558 } 27559 _ = x3.Args[1] 27560 if p != x3.Args[0] { 27561 break 27562 } 27563 if mem != x3.Args[1] { 27564 break 27565 } 27566 y4 := v.Args[1] 27567 if y4.Op != OpARM64MOVDnop { 27568 break 27569 } 27570 x4 := y4.Args[0] 27571 if x4.Op != OpARM64MOVBUload { 27572 break 27573 } 27574 if x4.AuxInt != 7 { 27575 break 27576 } 27577 if x4.Aux != s { 27578 break 27579 } 27580 _ = x4.Args[1] 27581 if p != x4.Args[0] { 27582 break 27583 } 27584 if mem != x4.Args[1] { 27585 break 27586 } 27587 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27588 break 27589 } 27590 b = mergePoint(b, x0, x1, x2, x3, x4) 27591 v0 := b.NewValue0(x4.Pos, OpARM64MOVDloadidx, t) 27592 v.reset(OpCopy) 27593 v.AddArg(v0) 27594 v0.AddArg(ptr0) 27595 v1 := b.NewValue0(x4.Pos, OpARM64SLLconst, idx0.Type) 27596 v1.AuxInt = 2 27597 v1.AddArg(idx0) 27598 v0.AddArg(v1) 27599 v0.AddArg(mem) 27600 return true 27601 } 27602 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] x0:(MOVWUloadidx ptr idx mem) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [4] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [5] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [6] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr (ADDconst [7] idx) mem))) 27603 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 
&& mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 27604 // result: @mergePoint(b,x0,x1,x2,x3,x4) (MOVDloadidx <t> ptr idx mem) 27605 for { 27606 t := v.Type 27607 if v.AuxInt != 56 { 27608 break 27609 } 27610 _ = v.Args[1] 27611 o0 := v.Args[0] 27612 if o0.Op != OpARM64ORshiftLL { 27613 break 27614 } 27615 if o0.AuxInt != 48 { 27616 break 27617 } 27618 _ = o0.Args[1] 27619 o1 := o0.Args[0] 27620 if o1.Op != OpARM64ORshiftLL { 27621 break 27622 } 27623 if o1.AuxInt != 40 { 27624 break 27625 } 27626 _ = o1.Args[1] 27627 o2 := o1.Args[0] 27628 if o2.Op != OpARM64ORshiftLL { 27629 break 27630 } 27631 if o2.AuxInt != 32 { 27632 break 27633 } 27634 _ = o2.Args[1] 27635 x0 := o2.Args[0] 27636 if x0.Op != OpARM64MOVWUloadidx { 27637 break 27638 } 27639 _ = x0.Args[2] 27640 ptr := x0.Args[0] 27641 idx := x0.Args[1] 27642 mem := x0.Args[2] 27643 y1 := o2.Args[1] 27644 if y1.Op != OpARM64MOVDnop { 27645 break 27646 } 27647 x1 := y1.Args[0] 27648 if x1.Op != OpARM64MOVBUloadidx { 27649 break 27650 } 27651 _ = x1.Args[2] 27652 if ptr != x1.Args[0] { 27653 break 27654 } 27655 x1_1 := x1.Args[1] 27656 if x1_1.Op != OpARM64ADDconst { 27657 break 27658 } 27659 if x1_1.AuxInt != 4 { 27660 break 27661 } 27662 if idx != x1_1.Args[0] { 27663 break 27664 } 27665 if mem != x1.Args[2] { 27666 break 27667 } 27668 y2 := o1.Args[1] 27669 if y2.Op != OpARM64MOVDnop { 27670 break 27671 } 27672 x2 := y2.Args[0] 27673 if x2.Op != OpARM64MOVBUloadidx { 27674 break 27675 } 27676 _ = x2.Args[2] 27677 if ptr != x2.Args[0] { 27678 break 27679 } 27680 x2_1 := x2.Args[1] 27681 if x2_1.Op != OpARM64ADDconst { 27682 break 27683 } 27684 if x2_1.AuxInt != 5 { 27685 break 27686 } 27687 if idx != x2_1.Args[0] { 27688 break 27689 } 27690 if mem != x2.Args[2] { 27691 break 27692 } 27693 y3 := o0.Args[1] 27694 if y3.Op != OpARM64MOVDnop { 27695 
break 27696 } 27697 x3 := y3.Args[0] 27698 if x3.Op != OpARM64MOVBUloadidx { 27699 break 27700 } 27701 _ = x3.Args[2] 27702 if ptr != x3.Args[0] { 27703 break 27704 } 27705 x3_1 := x3.Args[1] 27706 if x3_1.Op != OpARM64ADDconst { 27707 break 27708 } 27709 if x3_1.AuxInt != 6 { 27710 break 27711 } 27712 if idx != x3_1.Args[0] { 27713 break 27714 } 27715 if mem != x3.Args[2] { 27716 break 27717 } 27718 y4 := v.Args[1] 27719 if y4.Op != OpARM64MOVDnop { 27720 break 27721 } 27722 x4 := y4.Args[0] 27723 if x4.Op != OpARM64MOVBUloadidx { 27724 break 27725 } 27726 _ = x4.Args[2] 27727 if ptr != x4.Args[0] { 27728 break 27729 } 27730 x4_1 := x4.Args[1] 27731 if x4_1.Op != OpARM64ADDconst { 27732 break 27733 } 27734 if x4_1.AuxInt != 7 { 27735 break 27736 } 27737 if idx != x4_1.Args[0] { 27738 break 27739 } 27740 if mem != x4.Args[2] { 27741 break 27742 } 27743 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 27744 break 27745 } 27746 b = mergePoint(b, x0, x1, x2, x3, x4) 27747 v0 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 27748 v.reset(OpCopy) 27749 v.AddArg(v0) 27750 v0.AddArg(ptr) 27751 v0.AddArg(idx) 27752 v0.AddArg(mem) 27753 return true 27754 } 27755 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [i1] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i0] {s} p mem))) 27756 // cond: i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27757 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUload <t> [i0] {s} p mem)) 27758 for { 27759 t := v.Type 27760 if v.AuxInt != 8 { 27761 break 27762 } 27763 
_ = v.Args[1] 27764 y0 := v.Args[0] 27765 if y0.Op != OpARM64MOVDnop { 27766 break 27767 } 27768 x0 := y0.Args[0] 27769 if x0.Op != OpARM64MOVBUload { 27770 break 27771 } 27772 i1 := x0.AuxInt 27773 s := x0.Aux 27774 _ = x0.Args[1] 27775 p := x0.Args[0] 27776 mem := x0.Args[1] 27777 y1 := v.Args[1] 27778 if y1.Op != OpARM64MOVDnop { 27779 break 27780 } 27781 x1 := y1.Args[0] 27782 if x1.Op != OpARM64MOVBUload { 27783 break 27784 } 27785 i0 := x1.AuxInt 27786 if x1.Aux != s { 27787 break 27788 } 27789 _ = x1.Args[1] 27790 if p != x1.Args[0] { 27791 break 27792 } 27793 if mem != x1.Args[1] { 27794 break 27795 } 27796 if !(i1 == i0+1 && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27797 break 27798 } 27799 b = mergePoint(b, x0, x1) 27800 v0 := b.NewValue0(x1.Pos, OpARM64REV16W, t) 27801 v.reset(OpCopy) 27802 v.AddArg(v0) 27803 v1 := b.NewValue0(x1.Pos, OpARM64MOVHUload, t) 27804 v1.AuxInt = i0 27805 v1.Aux = s 27806 v1.AddArg(p) 27807 v1.AddArg(mem) 27808 v0.AddArg(v1) 27809 return true 27810 } 27811 return false 27812 } 27813 func rewriteValueARM64_OpARM64ORshiftLL_20(v *Value) bool { 27814 b := v.Block 27815 _ = b 27816 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr0 idx0 mem))) 27817 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27818 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr0 idx0 mem)) 27819 for { 27820 t := v.Type 27821 if v.AuxInt != 8 { 27822 break 27823 } 27824 _ = v.Args[1] 27825 y0 := v.Args[0] 27826 if y0.Op != OpARM64MOVDnop { 27827 break 27828 } 27829 x0 := y0.Args[0] 27830 if x0.Op != OpARM64MOVBUload { 27831 break 27832 } 
27833 if x0.AuxInt != 1 { 27834 break 27835 } 27836 s := x0.Aux 27837 _ = x0.Args[1] 27838 p1 := x0.Args[0] 27839 if p1.Op != OpARM64ADD { 27840 break 27841 } 27842 _ = p1.Args[1] 27843 ptr1 := p1.Args[0] 27844 idx1 := p1.Args[1] 27845 mem := x0.Args[1] 27846 y1 := v.Args[1] 27847 if y1.Op != OpARM64MOVDnop { 27848 break 27849 } 27850 x1 := y1.Args[0] 27851 if x1.Op != OpARM64MOVBUloadidx { 27852 break 27853 } 27854 _ = x1.Args[2] 27855 ptr0 := x1.Args[0] 27856 idx0 := x1.Args[1] 27857 if mem != x1.Args[2] { 27858 break 27859 } 27860 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27861 break 27862 } 27863 b = mergePoint(b, x0, x1) 27864 v0 := b.NewValue0(x0.Pos, OpARM64REV16W, t) 27865 v.reset(OpCopy) 27866 v.AddArg(v0) 27867 v1 := b.NewValue0(x0.Pos, OpARM64MOVHUloadidx, t) 27868 v1.AddArg(ptr0) 27869 v1.AddArg(idx0) 27870 v1.AddArg(mem) 27871 v0.AddArg(v1) 27872 return true 27873 } 27874 // match: (ORshiftLL <t> [8] y0:(MOVDnop x0:(MOVBUloadidx ptr (ADDconst [1] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr idx mem))) 27875 // cond: x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b,x0,x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1) 27876 // result: @mergePoint(b,x0,x1) (REV16W <t> (MOVHUloadidx <t> ptr idx mem)) 27877 for { 27878 t := v.Type 27879 if v.AuxInt != 8 { 27880 break 27881 } 27882 _ = v.Args[1] 27883 y0 := v.Args[0] 27884 if y0.Op != OpARM64MOVDnop { 27885 break 27886 } 27887 x0 := y0.Args[0] 27888 if x0.Op != OpARM64MOVBUloadidx { 27889 break 27890 } 27891 _ = x0.Args[2] 27892 ptr := x0.Args[0] 27893 x0_1 := x0.Args[1] 27894 if x0_1.Op != OpARM64ADDconst { 27895 break 27896 } 27897 if x0_1.AuxInt != 1 { 27898 break 27899 } 27900 idx := x0_1.Args[0] 27901 mem := x0.Args[2] 27902 y1 
:= v.Args[1] 27903 if y1.Op != OpARM64MOVDnop { 27904 break 27905 } 27906 x1 := y1.Args[0] 27907 if x1.Op != OpARM64MOVBUloadidx { 27908 break 27909 } 27910 _ = x1.Args[2] 27911 if ptr != x1.Args[0] { 27912 break 27913 } 27914 if idx != x1.Args[1] { 27915 break 27916 } 27917 if mem != x1.Args[2] { 27918 break 27919 } 27920 if !(x0.Uses == 1 && x1.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && mergePoint(b, x0, x1) != nil && clobber(x0) && clobber(x1) && clobber(y0) && clobber(y1)) { 27921 break 27922 } 27923 b = mergePoint(b, x0, x1) 27924 v0 := b.NewValue0(v.Pos, OpARM64REV16W, t) 27925 v.reset(OpCopy) 27926 v.AddArg(v0) 27927 v1 := b.NewValue0(v.Pos, OpARM64MOVHUloadidx, t) 27928 v1.AddArg(ptr) 27929 v1.AddArg(idx) 27930 v1.AddArg(mem) 27931 v0.AddArg(v1) 27932 return true 27933 } 27934 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [i2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i1] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i0] {s} p mem))) 27935 // cond: i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 27936 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 27937 for { 27938 t := v.Type 27939 if v.AuxInt != 24 { 27940 break 27941 } 27942 _ = v.Args[1] 27943 o0 := v.Args[0] 27944 if o0.Op != OpARM64ORshiftLL { 27945 break 27946 } 27947 if o0.AuxInt != 16 { 27948 break 27949 } 27950 _ = o0.Args[1] 27951 y0 := o0.Args[0] 27952 if y0.Op != OpARM64REV16W { 27953 break 27954 } 27955 x0 := y0.Args[0] 27956 if x0.Op != OpARM64MOVHUload { 27957 break 27958 } 27959 i2 := x0.AuxInt 27960 s := x0.Aux 27961 _ = x0.Args[1] 27962 p := x0.Args[0] 27963 mem := x0.Args[1] 27964 y1 := o0.Args[1] 27965 if y1.Op != OpARM64MOVDnop { 27966 break 27967 } 27968 x1 := y1.Args[0] 27969 if x1.Op != 
OpARM64MOVBUload { 27970 break 27971 } 27972 i1 := x1.AuxInt 27973 if x1.Aux != s { 27974 break 27975 } 27976 _ = x1.Args[1] 27977 if p != x1.Args[0] { 27978 break 27979 } 27980 if mem != x1.Args[1] { 27981 break 27982 } 27983 y2 := v.Args[1] 27984 if y2.Op != OpARM64MOVDnop { 27985 break 27986 } 27987 x2 := y2.Args[0] 27988 if x2.Op != OpARM64MOVBUload { 27989 break 27990 } 27991 i0 := x2.AuxInt 27992 if x2.Aux != s { 27993 break 27994 } 27995 _ = x2.Args[1] 27996 if p != x2.Args[0] { 27997 break 27998 } 27999 if mem != x2.Args[1] { 28000 break 28001 } 28002 if !(i1 == i0+1 && i2 == i0+2 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28003 break 28004 } 28005 b = mergePoint(b, x0, x1, x2) 28006 v0 := b.NewValue0(x2.Pos, OpARM64REVW, t) 28007 v.reset(OpCopy) 28008 v.AddArg(v0) 28009 v1 := b.NewValue0(x2.Pos, OpARM64MOVWUload, t) 28010 v1.Aux = s 28011 v2 := b.NewValue0(x2.Pos, OpOffPtr, p.Type) 28012 v2.AuxInt = i0 28013 v2.AddArg(p) 28014 v1.AddArg(v2) 28015 v1.AddArg(mem) 28016 v0.AddArg(v1) 28017 return true 28018 } 28019 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUload [2] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr0 idx0 mem))) 28020 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 28021 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr0 idx0 mem)) 28022 for { 28023 t := v.Type 28024 if v.AuxInt != 24 { 28025 break 
28026 } 28027 _ = v.Args[1] 28028 o0 := v.Args[0] 28029 if o0.Op != OpARM64ORshiftLL { 28030 break 28031 } 28032 if o0.AuxInt != 16 { 28033 break 28034 } 28035 _ = o0.Args[1] 28036 y0 := o0.Args[0] 28037 if y0.Op != OpARM64REV16W { 28038 break 28039 } 28040 x0 := y0.Args[0] 28041 if x0.Op != OpARM64MOVHUload { 28042 break 28043 } 28044 if x0.AuxInt != 2 { 28045 break 28046 } 28047 s := x0.Aux 28048 _ = x0.Args[1] 28049 p := x0.Args[0] 28050 mem := x0.Args[1] 28051 y1 := o0.Args[1] 28052 if y1.Op != OpARM64MOVDnop { 28053 break 28054 } 28055 x1 := y1.Args[0] 28056 if x1.Op != OpARM64MOVBUload { 28057 break 28058 } 28059 if x1.AuxInt != 1 { 28060 break 28061 } 28062 if x1.Aux != s { 28063 break 28064 } 28065 _ = x1.Args[1] 28066 p1 := x1.Args[0] 28067 if p1.Op != OpARM64ADD { 28068 break 28069 } 28070 _ = p1.Args[1] 28071 ptr1 := p1.Args[0] 28072 idx1 := p1.Args[1] 28073 if mem != x1.Args[1] { 28074 break 28075 } 28076 y2 := v.Args[1] 28077 if y2.Op != OpARM64MOVDnop { 28078 break 28079 } 28080 x2 := y2.Args[0] 28081 if x2.Op != OpARM64MOVBUloadidx { 28082 break 28083 } 28084 _ = x2.Args[2] 28085 ptr0 := x2.Args[0] 28086 idx0 := x2.Args[1] 28087 if mem != x2.Args[2] { 28088 break 28089 } 28090 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28091 break 28092 } 28093 b = mergePoint(b, x0, x1, x2) 28094 v0 := b.NewValue0(x1.Pos, OpARM64REVW, t) 28095 v.reset(OpCopy) 28096 v.AddArg(v0) 28097 v1 := b.NewValue0(x1.Pos, OpARM64MOVWUloadidx, t) 28098 v1.AddArg(ptr0) 28099 v1.AddArg(idx0) 28100 v1.AddArg(mem) 28101 v0.AddArg(v1) 28102 return true 28103 } 28104 // match: (ORshiftLL <t> [24] o0:(ORshiftLL [16] y0:(REV16W x0:(MOVHUloadidx 
ptr (ADDconst [2] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr idx mem))) 28105 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && o0.Uses == 1 && mergePoint(b,x0,x1,x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0) 28106 // result: @mergePoint(b,x0,x1,x2) (REVW <t> (MOVWUloadidx <t> ptr idx mem)) 28107 for { 28108 t := v.Type 28109 if v.AuxInt != 24 { 28110 break 28111 } 28112 _ = v.Args[1] 28113 o0 := v.Args[0] 28114 if o0.Op != OpARM64ORshiftLL { 28115 break 28116 } 28117 if o0.AuxInt != 16 { 28118 break 28119 } 28120 _ = o0.Args[1] 28121 y0 := o0.Args[0] 28122 if y0.Op != OpARM64REV16W { 28123 break 28124 } 28125 x0 := y0.Args[0] 28126 if x0.Op != OpARM64MOVHUloadidx { 28127 break 28128 } 28129 _ = x0.Args[2] 28130 ptr := x0.Args[0] 28131 x0_1 := x0.Args[1] 28132 if x0_1.Op != OpARM64ADDconst { 28133 break 28134 } 28135 if x0_1.AuxInt != 2 { 28136 break 28137 } 28138 idx := x0_1.Args[0] 28139 mem := x0.Args[2] 28140 y1 := o0.Args[1] 28141 if y1.Op != OpARM64MOVDnop { 28142 break 28143 } 28144 x1 := y1.Args[0] 28145 if x1.Op != OpARM64MOVBUloadidx { 28146 break 28147 } 28148 _ = x1.Args[2] 28149 if ptr != x1.Args[0] { 28150 break 28151 } 28152 x1_1 := x1.Args[1] 28153 if x1_1.Op != OpARM64ADDconst { 28154 break 28155 } 28156 if x1_1.AuxInt != 1 { 28157 break 28158 } 28159 if idx != x1_1.Args[0] { 28160 break 28161 } 28162 if mem != x1.Args[2] { 28163 break 28164 } 28165 y2 := v.Args[1] 28166 if y2.Op != OpARM64MOVDnop { 28167 break 28168 } 28169 x2 := y2.Args[0] 28170 if x2.Op != OpARM64MOVBUloadidx { 28171 break 28172 } 28173 _ = x2.Args[2] 28174 if ptr != x2.Args[0] { 28175 break 28176 } 28177 if idx != x2.Args[1] { 28178 break 28179 } 28180 if mem != x2.Args[2] { 28181 break 28182 } 28183 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 
&& o0.Uses == 1 && mergePoint(b, x0, x1, x2) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(o0)) { 28184 break 28185 } 28186 b = mergePoint(b, x0, x1, x2) 28187 v0 := b.NewValue0(v.Pos, OpARM64REVW, t) 28188 v.reset(OpCopy) 28189 v.AddArg(v0) 28190 v1 := b.NewValue0(v.Pos, OpARM64MOVWUloadidx, t) 28191 v1.AddArg(ptr) 28192 v1.AddArg(idx) 28193 v1.AddArg(mem) 28194 v0.AddArg(v1) 28195 return true 28196 } 28197 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [i4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [i3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [i2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [i1] {s} p mem))) y4:(MOVDnop x4:(MOVBUload [i0] {s} p mem))) 28198 // cond: i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28199 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDload <t> {s} (OffPtr <p.Type> [i0] p) mem)) 28200 for { 28201 t := v.Type 28202 if v.AuxInt != 56 { 28203 break 28204 } 28205 _ = v.Args[1] 28206 o0 := v.Args[0] 28207 if o0.Op != OpARM64ORshiftLL { 28208 break 28209 } 28210 if o0.AuxInt != 48 { 28211 break 28212 } 28213 _ = o0.Args[1] 28214 o1 := o0.Args[0] 28215 if o1.Op != OpARM64ORshiftLL { 28216 break 28217 } 28218 if o1.AuxInt != 40 { 28219 break 28220 } 28221 _ = o1.Args[1] 28222 o2 := o1.Args[0] 28223 if o2.Op != OpARM64ORshiftLL { 28224 break 28225 } 28226 if o2.AuxInt != 32 { 28227 break 28228 } 28229 _ = o2.Args[1] 28230 y0 := o2.Args[0] 28231 if y0.Op != OpARM64REVW { 28232 break 
28233 } 28234 x0 := y0.Args[0] 28235 if x0.Op != OpARM64MOVWUload { 28236 break 28237 } 28238 i4 := x0.AuxInt 28239 s := x0.Aux 28240 _ = x0.Args[1] 28241 p := x0.Args[0] 28242 mem := x0.Args[1] 28243 y1 := o2.Args[1] 28244 if y1.Op != OpARM64MOVDnop { 28245 break 28246 } 28247 x1 := y1.Args[0] 28248 if x1.Op != OpARM64MOVBUload { 28249 break 28250 } 28251 i3 := x1.AuxInt 28252 if x1.Aux != s { 28253 break 28254 } 28255 _ = x1.Args[1] 28256 if p != x1.Args[0] { 28257 break 28258 } 28259 if mem != x1.Args[1] { 28260 break 28261 } 28262 y2 := o1.Args[1] 28263 if y2.Op != OpARM64MOVDnop { 28264 break 28265 } 28266 x2 := y2.Args[0] 28267 if x2.Op != OpARM64MOVBUload { 28268 break 28269 } 28270 i2 := x2.AuxInt 28271 if x2.Aux != s { 28272 break 28273 } 28274 _ = x2.Args[1] 28275 if p != x2.Args[0] { 28276 break 28277 } 28278 if mem != x2.Args[1] { 28279 break 28280 } 28281 y3 := o0.Args[1] 28282 if y3.Op != OpARM64MOVDnop { 28283 break 28284 } 28285 x3 := y3.Args[0] 28286 if x3.Op != OpARM64MOVBUload { 28287 break 28288 } 28289 i1 := x3.AuxInt 28290 if x3.Aux != s { 28291 break 28292 } 28293 _ = x3.Args[1] 28294 if p != x3.Args[0] { 28295 break 28296 } 28297 if mem != x3.Args[1] { 28298 break 28299 } 28300 y4 := v.Args[1] 28301 if y4.Op != OpARM64MOVDnop { 28302 break 28303 } 28304 x4 := y4.Args[0] 28305 if x4.Op != OpARM64MOVBUload { 28306 break 28307 } 28308 i0 := x4.AuxInt 28309 if x4.Aux != s { 28310 break 28311 } 28312 _ = x4.Args[1] 28313 if p != x4.Args[0] { 28314 break 28315 } 28316 if mem != x4.Args[1] { 28317 break 28318 } 28319 if !(i1 == i0+1 && i2 == i0+2 && i3 == i0+3 && i4 == i0+4 && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && 
clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28320 break 28321 } 28322 b = mergePoint(b, x0, x1, x2, x3, x4) 28323 v0 := b.NewValue0(x4.Pos, OpARM64REV, t) 28324 v.reset(OpCopy) 28325 v.AddArg(v0) 28326 v1 := b.NewValue0(x4.Pos, OpARM64MOVDload, t) 28327 v1.Aux = s 28328 v2 := b.NewValue0(x4.Pos, OpOffPtr, p.Type) 28329 v2.AuxInt = i0 28330 v2.AddArg(p) 28331 v1.AddArg(v2) 28332 v1.AddArg(mem) 28333 v0.AddArg(v1) 28334 return true 28335 } 28336 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUload [4] {s} p mem)) y1:(MOVDnop x1:(MOVBUload [3] {s} p mem))) y2:(MOVDnop x2:(MOVBUload [2] {s} p mem))) y3:(MOVDnop x3:(MOVBUload [1] {s} p1:(ADD ptr1 idx1) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr0 idx0 mem))) 28337 // cond: s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28338 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr0 idx0 mem)) 28339 for { 28340 t := v.Type 28341 if v.AuxInt != 56 { 28342 break 28343 } 28344 _ = v.Args[1] 28345 o0 := v.Args[0] 28346 if o0.Op != OpARM64ORshiftLL { 28347 break 28348 } 28349 if o0.AuxInt != 48 { 28350 break 28351 } 28352 _ = o0.Args[1] 28353 o1 := o0.Args[0] 28354 if o1.Op != OpARM64ORshiftLL { 28355 break 28356 } 28357 if o1.AuxInt != 40 { 28358 break 28359 } 28360 _ = o1.Args[1] 28361 o2 := o1.Args[0] 28362 if o2.Op != OpARM64ORshiftLL { 28363 break 28364 } 28365 if o2.AuxInt != 32 { 28366 break 28367 } 28368 _ 
= o2.Args[1] 28369 y0 := o2.Args[0] 28370 if y0.Op != OpARM64REVW { 28371 break 28372 } 28373 x0 := y0.Args[0] 28374 if x0.Op != OpARM64MOVWUload { 28375 break 28376 } 28377 if x0.AuxInt != 4 { 28378 break 28379 } 28380 s := x0.Aux 28381 _ = x0.Args[1] 28382 p := x0.Args[0] 28383 mem := x0.Args[1] 28384 y1 := o2.Args[1] 28385 if y1.Op != OpARM64MOVDnop { 28386 break 28387 } 28388 x1 := y1.Args[0] 28389 if x1.Op != OpARM64MOVBUload { 28390 break 28391 } 28392 if x1.AuxInt != 3 { 28393 break 28394 } 28395 if x1.Aux != s { 28396 break 28397 } 28398 _ = x1.Args[1] 28399 if p != x1.Args[0] { 28400 break 28401 } 28402 if mem != x1.Args[1] { 28403 break 28404 } 28405 y2 := o1.Args[1] 28406 if y2.Op != OpARM64MOVDnop { 28407 break 28408 } 28409 x2 := y2.Args[0] 28410 if x2.Op != OpARM64MOVBUload { 28411 break 28412 } 28413 if x2.AuxInt != 2 { 28414 break 28415 } 28416 if x2.Aux != s { 28417 break 28418 } 28419 _ = x2.Args[1] 28420 if p != x2.Args[0] { 28421 break 28422 } 28423 if mem != x2.Args[1] { 28424 break 28425 } 28426 y3 := o0.Args[1] 28427 if y3.Op != OpARM64MOVDnop { 28428 break 28429 } 28430 x3 := y3.Args[0] 28431 if x3.Op != OpARM64MOVBUload { 28432 break 28433 } 28434 if x3.AuxInt != 1 { 28435 break 28436 } 28437 if x3.Aux != s { 28438 break 28439 } 28440 _ = x3.Args[1] 28441 p1 := x3.Args[0] 28442 if p1.Op != OpARM64ADD { 28443 break 28444 } 28445 _ = p1.Args[1] 28446 ptr1 := p1.Args[0] 28447 idx1 := p1.Args[1] 28448 if mem != x3.Args[1] { 28449 break 28450 } 28451 y4 := v.Args[1] 28452 if y4.Op != OpARM64MOVDnop { 28453 break 28454 } 28455 x4 := y4.Args[0] 28456 if x4.Op != OpARM64MOVBUloadidx { 28457 break 28458 } 28459 _ = x4.Args[2] 28460 ptr0 := x4.Args[0] 28461 idx0 := x4.Args[1] 28462 if mem != x4.Args[2] { 28463 break 28464 } 28465 if !(s == nil && x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && 
mergePoint(b, x0, x1, x2, x3, x4) != nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28466 break 28467 } 28468 b = mergePoint(b, x0, x1, x2, x3, x4) 28469 v0 := b.NewValue0(x3.Pos, OpARM64REV, t) 28470 v.reset(OpCopy) 28471 v.AddArg(v0) 28472 v1 := b.NewValue0(x3.Pos, OpARM64MOVDloadidx, t) 28473 v1.AddArg(ptr0) 28474 v1.AddArg(idx0) 28475 v1.AddArg(mem) 28476 v0.AddArg(v1) 28477 return true 28478 } 28479 // match: (ORshiftLL <t> [56] o0:(ORshiftLL [48] o1:(ORshiftLL [40] o2:(ORshiftLL [32] y0:(REVW x0:(MOVWUloadidx ptr (ADDconst [4] idx) mem)) y1:(MOVDnop x1:(MOVBUloadidx ptr (ADDconst [3] idx) mem))) y2:(MOVDnop x2:(MOVBUloadidx ptr (ADDconst [2] idx) mem))) y3:(MOVDnop x3:(MOVBUloadidx ptr (ADDconst [1] idx) mem))) y4:(MOVDnop x4:(MOVBUloadidx ptr idx mem))) 28480 // cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b,x0,x1,x2,x3,x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2) 28481 // result: @mergePoint(b,x0,x1,x2,x3,x4) (REV <t> (MOVDloadidx <t> ptr idx mem)) 28482 for { 28483 t := v.Type 28484 if v.AuxInt != 56 { 28485 break 28486 } 28487 _ = v.Args[1] 28488 o0 := v.Args[0] 28489 if o0.Op != OpARM64ORshiftLL { 28490 break 28491 } 28492 if o0.AuxInt != 48 { 28493 break 28494 } 28495 _ = o0.Args[1] 28496 o1 := o0.Args[0] 28497 if o1.Op != OpARM64ORshiftLL { 28498 break 28499 } 28500 if o1.AuxInt != 40 { 28501 break 28502 } 28503 _ = o1.Args[1] 28504 o2 := 
o1.Args[0] 28505 if o2.Op != OpARM64ORshiftLL { 28506 break 28507 } 28508 if o2.AuxInt != 32 { 28509 break 28510 } 28511 _ = o2.Args[1] 28512 y0 := o2.Args[0] 28513 if y0.Op != OpARM64REVW { 28514 break 28515 } 28516 x0 := y0.Args[0] 28517 if x0.Op != OpARM64MOVWUloadidx { 28518 break 28519 } 28520 _ = x0.Args[2] 28521 ptr := x0.Args[0] 28522 x0_1 := x0.Args[1] 28523 if x0_1.Op != OpARM64ADDconst { 28524 break 28525 } 28526 if x0_1.AuxInt != 4 { 28527 break 28528 } 28529 idx := x0_1.Args[0] 28530 mem := x0.Args[2] 28531 y1 := o2.Args[1] 28532 if y1.Op != OpARM64MOVDnop { 28533 break 28534 } 28535 x1 := y1.Args[0] 28536 if x1.Op != OpARM64MOVBUloadidx { 28537 break 28538 } 28539 _ = x1.Args[2] 28540 if ptr != x1.Args[0] { 28541 break 28542 } 28543 x1_1 := x1.Args[1] 28544 if x1_1.Op != OpARM64ADDconst { 28545 break 28546 } 28547 if x1_1.AuxInt != 3 { 28548 break 28549 } 28550 if idx != x1_1.Args[0] { 28551 break 28552 } 28553 if mem != x1.Args[2] { 28554 break 28555 } 28556 y2 := o1.Args[1] 28557 if y2.Op != OpARM64MOVDnop { 28558 break 28559 } 28560 x2 := y2.Args[0] 28561 if x2.Op != OpARM64MOVBUloadidx { 28562 break 28563 } 28564 _ = x2.Args[2] 28565 if ptr != x2.Args[0] { 28566 break 28567 } 28568 x2_1 := x2.Args[1] 28569 if x2_1.Op != OpARM64ADDconst { 28570 break 28571 } 28572 if x2_1.AuxInt != 2 { 28573 break 28574 } 28575 if idx != x2_1.Args[0] { 28576 break 28577 } 28578 if mem != x2.Args[2] { 28579 break 28580 } 28581 y3 := o0.Args[1] 28582 if y3.Op != OpARM64MOVDnop { 28583 break 28584 } 28585 x3 := y3.Args[0] 28586 if x3.Op != OpARM64MOVBUloadidx { 28587 break 28588 } 28589 _ = x3.Args[2] 28590 if ptr != x3.Args[0] { 28591 break 28592 } 28593 x3_1 := x3.Args[1] 28594 if x3_1.Op != OpARM64ADDconst { 28595 break 28596 } 28597 if x3_1.AuxInt != 1 { 28598 break 28599 } 28600 if idx != x3_1.Args[0] { 28601 break 28602 } 28603 if mem != x3.Args[2] { 28604 break 28605 } 28606 y4 := v.Args[1] 28607 if y4.Op != OpARM64MOVDnop { 28608 break 28609 } 28610 x4 := 
y4.Args[0] 28611 if x4.Op != OpARM64MOVBUloadidx { 28612 break 28613 } 28614 _ = x4.Args[2] 28615 if ptr != x4.Args[0] { 28616 break 28617 } 28618 if idx != x4.Args[1] { 28619 break 28620 } 28621 if mem != x4.Args[2] { 28622 break 28623 } 28624 if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && y0.Uses == 1 && y1.Uses == 1 && y2.Uses == 1 && y3.Uses == 1 && y4.Uses == 1 && o0.Uses == 1 && o1.Uses == 1 && o2.Uses == 1 && mergePoint(b, x0, x1, x2, x3, x4) != nil && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(y0) && clobber(y1) && clobber(y2) && clobber(y3) && clobber(y4) && clobber(o0) && clobber(o1) && clobber(o2)) { 28625 break 28626 } 28627 b = mergePoint(b, x0, x1, x2, x3, x4) 28628 v0 := b.NewValue0(v.Pos, OpARM64REV, t) 28629 v.reset(OpCopy) 28630 v.AddArg(v0) 28631 v1 := b.NewValue0(v.Pos, OpARM64MOVDloadidx, t) 28632 v1.AddArg(ptr) 28633 v1.AddArg(idx) 28634 v1.AddArg(mem) 28635 v0.AddArg(v1) 28636 return true 28637 } 28638 return false 28639 } 28640 func rewriteValueARM64_OpARM64ORshiftRA_0(v *Value) bool { 28641 b := v.Block 28642 _ = b 28643 // match: (ORshiftRA (MOVDconst [c]) x [d]) 28644 // cond: 28645 // result: (ORconst [c] (SRAconst <x.Type> x [d])) 28646 for { 28647 d := v.AuxInt 28648 _ = v.Args[1] 28649 v_0 := v.Args[0] 28650 if v_0.Op != OpARM64MOVDconst { 28651 break 28652 } 28653 c := v_0.AuxInt 28654 x := v.Args[1] 28655 v.reset(OpARM64ORconst) 28656 v.AuxInt = c 28657 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 28658 v0.AuxInt = d 28659 v0.AddArg(x) 28660 v.AddArg(v0) 28661 return true 28662 } 28663 // match: (ORshiftRA x (MOVDconst [c]) [d]) 28664 // cond: 28665 // result: (ORconst x [c>>uint64(d)]) 28666 for { 28667 d := v.AuxInt 28668 _ = v.Args[1] 28669 x := v.Args[0] 28670 v_1 := v.Args[1] 28671 if v_1.Op != OpARM64MOVDconst { 28672 break 28673 } 28674 c := v_1.AuxInt 28675 v.reset(OpARM64ORconst) 28676 v.AuxInt = c >> uint64(d) 28677 v.AddArg(x) 28678 return 
true 28679 } 28680 // match: (ORshiftRA x y:(SRAconst x [c]) [d]) 28681 // cond: c==d 28682 // result: y 28683 for { 28684 d := v.AuxInt 28685 _ = v.Args[1] 28686 x := v.Args[0] 28687 y := v.Args[1] 28688 if y.Op != OpARM64SRAconst { 28689 break 28690 } 28691 c := y.AuxInt 28692 if x != y.Args[0] { 28693 break 28694 } 28695 if !(c == d) { 28696 break 28697 } 28698 v.reset(OpCopy) 28699 v.Type = y.Type 28700 v.AddArg(y) 28701 return true 28702 } 28703 return false 28704 } 28705 func rewriteValueARM64_OpARM64ORshiftRL_0(v *Value) bool { 28706 b := v.Block 28707 _ = b 28708 // match: (ORshiftRL (MOVDconst [c]) x [d]) 28709 // cond: 28710 // result: (ORconst [c] (SRLconst <x.Type> x [d])) 28711 for { 28712 d := v.AuxInt 28713 _ = v.Args[1] 28714 v_0 := v.Args[0] 28715 if v_0.Op != OpARM64MOVDconst { 28716 break 28717 } 28718 c := v_0.AuxInt 28719 x := v.Args[1] 28720 v.reset(OpARM64ORconst) 28721 v.AuxInt = c 28722 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 28723 v0.AuxInt = d 28724 v0.AddArg(x) 28725 v.AddArg(v0) 28726 return true 28727 } 28728 // match: (ORshiftRL x (MOVDconst [c]) [d]) 28729 // cond: 28730 // result: (ORconst x [int64(uint64(c)>>uint64(d))]) 28731 for { 28732 d := v.AuxInt 28733 _ = v.Args[1] 28734 x := v.Args[0] 28735 v_1 := v.Args[1] 28736 if v_1.Op != OpARM64MOVDconst { 28737 break 28738 } 28739 c := v_1.AuxInt 28740 v.reset(OpARM64ORconst) 28741 v.AuxInt = int64(uint64(c) >> uint64(d)) 28742 v.AddArg(x) 28743 return true 28744 } 28745 // match: (ORshiftRL x y:(SRLconst x [c]) [d]) 28746 // cond: c==d 28747 // result: y 28748 for { 28749 d := v.AuxInt 28750 _ = v.Args[1] 28751 x := v.Args[0] 28752 y := v.Args[1] 28753 if y.Op != OpARM64SRLconst { 28754 break 28755 } 28756 c := y.AuxInt 28757 if x != y.Args[0] { 28758 break 28759 } 28760 if !(c == d) { 28761 break 28762 } 28763 v.reset(OpCopy) 28764 v.Type = y.Type 28765 v.AddArg(y) 28766 return true 28767 } 28768 // match: (ORshiftRL [c] (SLLconst x [64-c]) x) 28769 // cond: 28770 // 
result: (RORconst [ c] x) 28771 for { 28772 c := v.AuxInt 28773 _ = v.Args[1] 28774 v_0 := v.Args[0] 28775 if v_0.Op != OpARM64SLLconst { 28776 break 28777 } 28778 if v_0.AuxInt != 64-c { 28779 break 28780 } 28781 x := v_0.Args[0] 28782 if x != v.Args[1] { 28783 break 28784 } 28785 v.reset(OpARM64RORconst) 28786 v.AuxInt = c 28787 v.AddArg(x) 28788 return true 28789 } 28790 // match: (ORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 28791 // cond: c < 32 && t.Size() == 4 28792 // result: (RORWconst [c] x) 28793 for { 28794 t := v.Type 28795 c := v.AuxInt 28796 _ = v.Args[1] 28797 v_0 := v.Args[0] 28798 if v_0.Op != OpARM64SLLconst { 28799 break 28800 } 28801 if v_0.AuxInt != 32-c { 28802 break 28803 } 28804 x := v_0.Args[0] 28805 v_1 := v.Args[1] 28806 if v_1.Op != OpARM64MOVWUreg { 28807 break 28808 } 28809 if x != v_1.Args[0] { 28810 break 28811 } 28812 if !(c < 32 && t.Size() == 4) { 28813 break 28814 } 28815 v.reset(OpARM64RORWconst) 28816 v.AuxInt = c 28817 v.AddArg(x) 28818 return true 28819 } 28820 // match: (ORshiftRL [rc] (ANDconst [ac] x) (SLLconst [lc] y)) 28821 // cond: lc > rc && ac == ^((1<<uint(64-lc)-1) << uint64(lc-rc)) 28822 // result: (BFI [arm64BFAuxInt(lc-rc, 64-lc)] x y) 28823 for { 28824 rc := v.AuxInt 28825 _ = v.Args[1] 28826 v_0 := v.Args[0] 28827 if v_0.Op != OpARM64ANDconst { 28828 break 28829 } 28830 ac := v_0.AuxInt 28831 x := v_0.Args[0] 28832 v_1 := v.Args[1] 28833 if v_1.Op != OpARM64SLLconst { 28834 break 28835 } 28836 lc := v_1.AuxInt 28837 y := v_1.Args[0] 28838 if !(lc > rc && ac == ^((1<<uint(64-lc)-1)<<uint64(lc-rc))) { 28839 break 28840 } 28841 v.reset(OpARM64BFI) 28842 v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc) 28843 v.AddArg(x) 28844 v.AddArg(y) 28845 return true 28846 } 28847 return false 28848 } 28849 func rewriteValueARM64_OpARM64RORWconst_0(v *Value) bool { 28850 // match: (RORWconst [c] (RORWconst [d] x)) 28851 // cond: 28852 // result: (RORWconst [(c+d)&31] x) 28853 for { 28854 c := v.AuxInt 28855 v_0 := v.Args[0] 
		if v_0.Op != OpARM64RORWconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64RORWconst)
		v.AuxInt = (c + d) & 31
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64RORconst_0 applies the generated rewrite rules
// for OpARM64RORconst (folding nested 64-bit rotate-right constants); it
// reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64RORconst_0(v *Value) bool {
	// match: (RORconst [c] (RORconst [d] x))
	// cond:
	// result: (RORconst [(c+d)&63] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64RORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64RORconst)
		v.AuxInt = (c + d) & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLL_0 applies the generated rewrite rules for
// OpARM64SLL (shift by a constant amount becomes SLLconst); it reports
// whether v was rewritten in place.
func rewriteValueARM64_OpARM64SLL_0(v *Value) bool {
	// match: (SLL x (MOVDconst [c]))
	// cond:
	// result: (SLLconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SLLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SLLconst_0 applies the generated rewrite rules
// for OpARM64SLLconst (constant folding and fusing shift/mask/zero-extend
// pairs into UBFIZ bitfield ops); it reports whether v was rewritten in
// place.
func rewriteValueARM64_OpARM64SLLconst_0(v *Value) bool {
	// match: (SLLconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d<<uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d << uint64(c)
		return true
	}
	// match: (SLLconst [c] (SRLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [^(1<<uint(c)-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^(1<<uint(c) - 1)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, 0))
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVWUreg x))
	// cond: isARM64BFMask(sc, 1<<32-1, 0)
	// result: (UBFIZ [arm64BFAuxInt(sc, 32)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<32-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, 32)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVHUreg x))
	// cond: isARM64BFMask(sc, 1<<16-1, 0)
	// result: (UBFIZ [arm64BFAuxInt(sc, 16)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, 16)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (MOVBUreg x))
	// cond: isARM64BFMask(sc, 1<<8-1, 0)
	// result: (UBFIZ [arm64BFAuxInt(sc, 8)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, 0)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, 8)
		v.AddArg(x)
		return true
	}
	// match: (SLLconst [sc] (UBFIZ [bfc] x))
	// cond: sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64
	// result: (UBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRA_0 applies the generated rewrite rules for
// OpARM64SRA (shift by a constant amount becomes SRAconst); it reports
// whether v was rewritten in place.
func rewriteValueARM64_OpARM64SRA_0(v *Value) bool {
	// match: (SRA x (MOVDconst [c]))
	// cond:
	// result: (SRAconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRAconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRAconst_0 applies the generated rewrite rules
// for OpARM64SRAconst (constant folding and fusing shift/sign-extend pairs
// into SBFIZ/SBFX signed-bitfield ops); it reports whether v was rewritten
// in place.
func rewriteValueARM64_OpARM64SRAconst_0(v *Value) bool {
	// match: (SRAconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d>>uint64(c)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d >> uint64(c)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (SBFIZ [arm64BFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (SLLconst [lc] x))
	// cond: lc <= rc
	// result: (SBFX [arm64BFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc <= rc) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BFAuxInt(rc-lc, 64-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVWreg x))
	// cond: rc < 32
	// result: (SBFX [arm64BFAuxInt(rc, 32-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 32) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BFAuxInt(rc, 32-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVHreg x))
	// cond: rc < 16
	// result: (SBFX [arm64BFAuxInt(rc, 16-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 16) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BFAuxInt(rc, 16-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [rc] (MOVBreg x))
	// cond: rc < 8
	// result: (SBFX [arm64BFAuxInt(rc, 8-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(rc < 8) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BFAuxInt(rc, 8-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc < getARM64BFlsb(bfc)
	// result: (SBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	// match: (SRAconst [sc] (SBFIZ [bfc] x))
	// cond: sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)
	// result: (SBFX [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc >= getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64SBFX)
		v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRL_0 applies the generated rewrite rules for
// OpARM64SRL (shift by a constant amount becomes SRLconst); it reports
// whether v was rewritten in place.
func rewriteValueARM64_OpARM64SRL_0(v *Value) bool {
	// match: (SRL x (MOVDconst [c]))
	// cond:
	// result: (SRLconst x [c&63])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SRLconst)
		v.AuxInt = c & 63
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRLconst_0 applies the first batch of generated
// rewrite rules for OpARM64SRLconst (constant folding and fusing shift/mask
// pairs into UBFX/UBFIZ unsigned-bitfield ops); it reports whether v was
// rewritten in place.
func rewriteValueARM64_OpARM64SRLconst_0(v *Value) bool {
	// match: (SRLconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [int64(uint64(d)>>uint64(c))])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint64(d) >> uint64(c))
		return true
	}
	// match: (SRLconst [c] (SLLconst [c] x))
	// cond: 0 < c && c < 64
	// result: (ANDconst [1<<uint(64-c)-1] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != c {
			break
		}
		x := v_0.Args[0]
		if !(0 < c && c < 64) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = 1<<uint(64-c) - 1
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc > rc
	// result: (UBFIZ [arm64BFAuxInt(lc-rc, 64-lc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc > rc) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(lc-rc, 64-lc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (ANDconst [ac] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		ac := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (MOVWUreg x))
	// cond: isARM64BFMask(sc, 1<<32-1, sc)
	// result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<32-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<32-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (MOVHUreg x))
	// cond: isARM64BFMask(sc, 1<<16-1, sc)
	// result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<16-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<16-1, sc))
		v.AddArg(x)
		return true
	}
	// The same narrowing fold continues for zero-extended bytes.
	// match: (SRLconst [sc] (MOVBUreg x))
	// cond: isARM64BFMask(sc, 1<<8-1, sc)
	// result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, sc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [rc] (SLLconst [lc] x))
	// cond: lc < rc
	// result: (UBFX [arm64BFAuxInt(rc-lc, 64-rc)] x)
	for {
		rc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < rc) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(rc-lc, 64-rc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFX [bfc] x))
	// cond: sc < getARM64BFwidth(bfc)
	// result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc == getARM64BFlsb(bfc)
	// result: (ANDconst [1<<uint(getARM64BFwidth(bfc))-1] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc == getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64ANDconst)
		v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SRLconst_10 applies the second batch of generated
// rewrite rules for OpARM64SRLconst (folding a right shift into an existing
// UBFIZ); it reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64SRLconst_10(v *Value) bool {
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc < getARM64BFlsb(bfc)
	// result: (UBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc < getARM64BFlsb(bfc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))
		v.AddArg(x)
		return true
	}
	// match: (SRLconst [sc] (UBFIZ [bfc] x))
	// cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)
	// result: (UBFX [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x)
	for {
		sc := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFIZ {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64STP_0 applies the generated rewrite rules for
// OpARM64STP (offset/symbol folding into the address, and zero/zero pairs
// becoming MOVQstorezero); it reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64STP_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (STP [off1] {sym} (ADDconst [off2] ptr) val1 val2 mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+off2] {sym} ptr val1 val2 mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val1 := v.Args[1]
		val2 := v.Args[2]
		mem := v.Args[3]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val1)
		v.AddArg(val2)
		v.AddArg(mem)
		return true
	}
	// match: (STP [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val1 val2 mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (STP [off1+off2] {mergeSym(sym1,sym2)} ptr val1 val2 mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val1 := v.Args[1]
		val2 := v.Args[2]
		mem := v.Args[3]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64STP)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val1)
		v.AddArg(val2)
		v.AddArg(mem)
		return true
	}
	// match: (STP [off] {sym} ptr (MOVDconst [0]) (MOVDconst [0]) mem)
	// cond:
	// result: (MOVQstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUB_0 applies the first batch of generated
// rewrite rules for OpARM64SUB (constant folding, multiply-subtract fusion
// into MSUB/MADD variants, algebraic reassociation, and shifted-operand
// forms); it reports whether v was rewritten in place.
func rewriteValueARM64_OpARM64SUB_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (SUB x (MOVDconst [c]))
	// cond:
	// result: (SUBconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SUB a l:(MUL x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MSUB a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MUL {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUB)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MNEG x y))
	// cond: l.Uses==1 && clobber(l)
	// result: (MADD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEG {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MULW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MSUBW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MULW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MSUBW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB a l:(MNEGW x y))
	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
	// result: (MADDW a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		l := v.Args[1]
		if l.Op != OpARM64MNEGW {
			break
		}
		_ = l.Args[1]
		x := l.Args[0]
		y := l.Args[1]
		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
			break
		}
		v.reset(OpARM64MADDW)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (SUB x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (SUB x (SUB y z))
	// cond:
	// result: (SUB (ADD <v.Type> x z) y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SUB {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		z := v_1.Args[1]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, v.Type)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
	// match: (SUB (SUB x y) z)
	// cond:
	// result: (SUB x (ADD <y.Type> y z))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUB {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		z := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64ADD, y.Type)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (SUB x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUB_10 applies the second batch of generated
// rewrite rules for OpARM64SUB (arithmetic-shift operand form); it reports
// whether v was rewritten in place.
func rewriteValueARM64_OpARM64SUB_10(v *Value) bool {
	// match: (SUB x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (SUBshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64SUBshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBconst_0 applies the generated rewrite rules
// for OpARM64SUBconst (identity elimination, constant folding, and merging
// with an inner SUBconst/ADDconst); it reports whether v was rewritten in
// place.
func rewriteValueARM64_OpARM64SUBconst_0(v *Value) bool {
	// match: (SUBconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [d-c])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = d - c
		return true
	}
	// match: (SUBconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [-c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c - d
		v.AddArg(x)
		return true
	}
	// match: (SUBconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [-c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		// Tail of the (SUBconst [c] (ADDconst [d] x)) -> (ADDconst [-c+d] x)
		// case begun on the previous source line: fold the two constants.
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = -c + d
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBshiftLL_0 applies the generated rewrite rules
// for OpARM64SUBshiftLL and reports whether v was rewritten. This file is
// generated from gen/ARM64.rules; change the rules there, not here.
func rewriteValueARM64_OpARM64SUBshiftLL_0(v *Value) bool {
	// match: (SUBshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (SUBconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64SUBshiftRA_0 applies the generated rewrite rules
// for OpARM64SUBshiftRA and reports whether v was rewritten.
func rewriteValueARM64_OpARM64SUBshiftRA_0(v *Value) bool {
	// match: (SUBshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (SUBconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64SUBconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (SUBshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [0])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		// Rewrite continues on the next source line (v.AuxInt = 0; return true).
		v.reset(OpARM64MOVDconst)
29914 v.AuxInt = 0 29915 return true 29916 } 29917 return false 29918 } 29919 func rewriteValueARM64_OpARM64SUBshiftRL_0(v *Value) bool { 29920 // match: (SUBshiftRL x (MOVDconst [c]) [d]) 29921 // cond: 29922 // result: (SUBconst x [int64(uint64(c)>>uint64(d))]) 29923 for { 29924 d := v.AuxInt 29925 _ = v.Args[1] 29926 x := v.Args[0] 29927 v_1 := v.Args[1] 29928 if v_1.Op != OpARM64MOVDconst { 29929 break 29930 } 29931 c := v_1.AuxInt 29932 v.reset(OpARM64SUBconst) 29933 v.AuxInt = int64(uint64(c) >> uint64(d)) 29934 v.AddArg(x) 29935 return true 29936 } 29937 // match: (SUBshiftRL x (SRLconst x [c]) [d]) 29938 // cond: c==d 29939 // result: (MOVDconst [0]) 29940 for { 29941 d := v.AuxInt 29942 _ = v.Args[1] 29943 x := v.Args[0] 29944 v_1 := v.Args[1] 29945 if v_1.Op != OpARM64SRLconst { 29946 break 29947 } 29948 c := v_1.AuxInt 29949 if x != v_1.Args[0] { 29950 break 29951 } 29952 if !(c == d) { 29953 break 29954 } 29955 v.reset(OpARM64MOVDconst) 29956 v.AuxInt = 0 29957 return true 29958 } 29959 return false 29960 } 29961 func rewriteValueARM64_OpARM64TST_0(v *Value) bool { 29962 // match: (TST x (MOVDconst [c])) 29963 // cond: 29964 // result: (TSTconst [c] x) 29965 for { 29966 _ = v.Args[1] 29967 x := v.Args[0] 29968 v_1 := v.Args[1] 29969 if v_1.Op != OpARM64MOVDconst { 29970 break 29971 } 29972 c := v_1.AuxInt 29973 v.reset(OpARM64TSTconst) 29974 v.AuxInt = c 29975 v.AddArg(x) 29976 return true 29977 } 29978 // match: (TST (MOVDconst [c]) x) 29979 // cond: 29980 // result: (TSTconst [c] x) 29981 for { 29982 _ = v.Args[1] 29983 v_0 := v.Args[0] 29984 if v_0.Op != OpARM64MOVDconst { 29985 break 29986 } 29987 c := v_0.AuxInt 29988 x := v.Args[1] 29989 v.reset(OpARM64TSTconst) 29990 v.AuxInt = c 29991 v.AddArg(x) 29992 return true 29993 } 29994 // match: (TST x0 x1:(SLLconst [c] y)) 29995 // cond: clobberIfDead(x1) 29996 // result: (TSTshiftLL x0 y [c]) 29997 for { 29998 _ = v.Args[1] 29999 x0 := v.Args[0] 30000 x1 := v.Args[1] 30001 if x1.Op != OpARM64SLLconst 
{ 30002 break 30003 } 30004 c := x1.AuxInt 30005 y := x1.Args[0] 30006 if !(clobberIfDead(x1)) { 30007 break 30008 } 30009 v.reset(OpARM64TSTshiftLL) 30010 v.AuxInt = c 30011 v.AddArg(x0) 30012 v.AddArg(y) 30013 return true 30014 } 30015 // match: (TST x1:(SLLconst [c] y) x0) 30016 // cond: clobberIfDead(x1) 30017 // result: (TSTshiftLL x0 y [c]) 30018 for { 30019 _ = v.Args[1] 30020 x1 := v.Args[0] 30021 if x1.Op != OpARM64SLLconst { 30022 break 30023 } 30024 c := x1.AuxInt 30025 y := x1.Args[0] 30026 x0 := v.Args[1] 30027 if !(clobberIfDead(x1)) { 30028 break 30029 } 30030 v.reset(OpARM64TSTshiftLL) 30031 v.AuxInt = c 30032 v.AddArg(x0) 30033 v.AddArg(y) 30034 return true 30035 } 30036 // match: (TST x0 x1:(SRLconst [c] y)) 30037 // cond: clobberIfDead(x1) 30038 // result: (TSTshiftRL x0 y [c]) 30039 for { 30040 _ = v.Args[1] 30041 x0 := v.Args[0] 30042 x1 := v.Args[1] 30043 if x1.Op != OpARM64SRLconst { 30044 break 30045 } 30046 c := x1.AuxInt 30047 y := x1.Args[0] 30048 if !(clobberIfDead(x1)) { 30049 break 30050 } 30051 v.reset(OpARM64TSTshiftRL) 30052 v.AuxInt = c 30053 v.AddArg(x0) 30054 v.AddArg(y) 30055 return true 30056 } 30057 // match: (TST x1:(SRLconst [c] y) x0) 30058 // cond: clobberIfDead(x1) 30059 // result: (TSTshiftRL x0 y [c]) 30060 for { 30061 _ = v.Args[1] 30062 x1 := v.Args[0] 30063 if x1.Op != OpARM64SRLconst { 30064 break 30065 } 30066 c := x1.AuxInt 30067 y := x1.Args[0] 30068 x0 := v.Args[1] 30069 if !(clobberIfDead(x1)) { 30070 break 30071 } 30072 v.reset(OpARM64TSTshiftRL) 30073 v.AuxInt = c 30074 v.AddArg(x0) 30075 v.AddArg(y) 30076 return true 30077 } 30078 // match: (TST x0 x1:(SRAconst [c] y)) 30079 // cond: clobberIfDead(x1) 30080 // result: (TSTshiftRA x0 y [c]) 30081 for { 30082 _ = v.Args[1] 30083 x0 := v.Args[0] 30084 x1 := v.Args[1] 30085 if x1.Op != OpARM64SRAconst { 30086 break 30087 } 30088 c := x1.AuxInt 30089 y := x1.Args[0] 30090 if !(clobberIfDead(x1)) { 30091 break 30092 } 30093 v.reset(OpARM64TSTshiftRA) 30094 
		// Tail of the (TST x0 x1:(SRAconst [c] y)) -> (TSTshiftRA x0 y [c])
		// case begun on the previous source line.
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (TST x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (TSTshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64TSTshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TSTW_0 applies the generated rewrite rules for
// OpARM64TSTW (32-bit test): fold a constant operand, on either side, into
// TSTWconst. Reports whether v was rewritten.
func rewriteValueARM64_OpARM64TSTW_0(v *Value) bool {
	// match: (TSTW x (MOVDconst [c]))
	// cond:
	// result: (TSTWconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64TSTWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (TSTW (MOVDconst [c]) x)
	// cond:
	// result: (TSTWconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64TSTWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueARM64_OpARM64TSTWconst_0 applies the generated rewrite rules
// for OpARM64TSTWconst: when both operands are constants, evaluate the
// 32-bit AND and replace the test with the resulting flags value.
func rewriteValueARM64_OpARM64TSTWconst_0(v *Value) bool {
	// match: (TSTWconst (MOVDconst [x]) [y])
	// cond: int32(x&y)==0
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x&y) == 0) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (TSTWconst (MOVDconst [x]) [y])
	// cond: int32(x&y)<0
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if
!(int32(x&y) < 0) { 30185 break 30186 } 30187 v.reset(OpARM64FlagLT_UGT) 30188 return true 30189 } 30190 // match: (TSTWconst (MOVDconst [x]) [y]) 30191 // cond: int32(x&y)>0 30192 // result: (FlagGT_UGT) 30193 for { 30194 y := v.AuxInt 30195 v_0 := v.Args[0] 30196 if v_0.Op != OpARM64MOVDconst { 30197 break 30198 } 30199 x := v_0.AuxInt 30200 if !(int32(x&y) > 0) { 30201 break 30202 } 30203 v.reset(OpARM64FlagGT_UGT) 30204 return true 30205 } 30206 return false 30207 } 30208 func rewriteValueARM64_OpARM64TSTconst_0(v *Value) bool { 30209 // match: (TSTconst (MOVDconst [x]) [y]) 30210 // cond: int64(x&y)==0 30211 // result: (FlagEQ) 30212 for { 30213 y := v.AuxInt 30214 v_0 := v.Args[0] 30215 if v_0.Op != OpARM64MOVDconst { 30216 break 30217 } 30218 x := v_0.AuxInt 30219 if !(int64(x&y) == 0) { 30220 break 30221 } 30222 v.reset(OpARM64FlagEQ) 30223 return true 30224 } 30225 // match: (TSTconst (MOVDconst [x]) [y]) 30226 // cond: int64(x&y)<0 30227 // result: (FlagLT_UGT) 30228 for { 30229 y := v.AuxInt 30230 v_0 := v.Args[0] 30231 if v_0.Op != OpARM64MOVDconst { 30232 break 30233 } 30234 x := v_0.AuxInt 30235 if !(int64(x&y) < 0) { 30236 break 30237 } 30238 v.reset(OpARM64FlagLT_UGT) 30239 return true 30240 } 30241 // match: (TSTconst (MOVDconst [x]) [y]) 30242 // cond: int64(x&y)>0 30243 // result: (FlagGT_UGT) 30244 for { 30245 y := v.AuxInt 30246 v_0 := v.Args[0] 30247 if v_0.Op != OpARM64MOVDconst { 30248 break 30249 } 30250 x := v_0.AuxInt 30251 if !(int64(x&y) > 0) { 30252 break 30253 } 30254 v.reset(OpARM64FlagGT_UGT) 30255 return true 30256 } 30257 return false 30258 } 30259 func rewriteValueARM64_OpARM64TSTshiftLL_0(v *Value) bool { 30260 b := v.Block 30261 _ = b 30262 // match: (TSTshiftLL (MOVDconst [c]) x [d]) 30263 // cond: 30264 // result: (TSTconst [c] (SLLconst <x.Type> x [d])) 30265 for { 30266 d := v.AuxInt 30267 _ = v.Args[1] 30268 v_0 := v.Args[0] 30269 if v_0.Op != OpARM64MOVDconst { 30270 break 30271 } 30272 c := v_0.AuxInt 30273 x := 
v.Args[1] 30274 v.reset(OpARM64TSTconst) 30275 v.AuxInt = c 30276 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 30277 v0.AuxInt = d 30278 v0.AddArg(x) 30279 v.AddArg(v0) 30280 return true 30281 } 30282 // match: (TSTshiftLL x (MOVDconst [c]) [d]) 30283 // cond: 30284 // result: (TSTconst x [int64(uint64(c)<<uint64(d))]) 30285 for { 30286 d := v.AuxInt 30287 _ = v.Args[1] 30288 x := v.Args[0] 30289 v_1 := v.Args[1] 30290 if v_1.Op != OpARM64MOVDconst { 30291 break 30292 } 30293 c := v_1.AuxInt 30294 v.reset(OpARM64TSTconst) 30295 v.AuxInt = int64(uint64(c) << uint64(d)) 30296 v.AddArg(x) 30297 return true 30298 } 30299 return false 30300 } 30301 func rewriteValueARM64_OpARM64TSTshiftRA_0(v *Value) bool { 30302 b := v.Block 30303 _ = b 30304 // match: (TSTshiftRA (MOVDconst [c]) x [d]) 30305 // cond: 30306 // result: (TSTconst [c] (SRAconst <x.Type> x [d])) 30307 for { 30308 d := v.AuxInt 30309 _ = v.Args[1] 30310 v_0 := v.Args[0] 30311 if v_0.Op != OpARM64MOVDconst { 30312 break 30313 } 30314 c := v_0.AuxInt 30315 x := v.Args[1] 30316 v.reset(OpARM64TSTconst) 30317 v.AuxInt = c 30318 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 30319 v0.AuxInt = d 30320 v0.AddArg(x) 30321 v.AddArg(v0) 30322 return true 30323 } 30324 // match: (TSTshiftRA x (MOVDconst [c]) [d]) 30325 // cond: 30326 // result: (TSTconst x [c>>uint64(d)]) 30327 for { 30328 d := v.AuxInt 30329 _ = v.Args[1] 30330 x := v.Args[0] 30331 v_1 := v.Args[1] 30332 if v_1.Op != OpARM64MOVDconst { 30333 break 30334 } 30335 c := v_1.AuxInt 30336 v.reset(OpARM64TSTconst) 30337 v.AuxInt = c >> uint64(d) 30338 v.AddArg(x) 30339 return true 30340 } 30341 return false 30342 } 30343 func rewriteValueARM64_OpARM64TSTshiftRL_0(v *Value) bool { 30344 b := v.Block 30345 _ = b 30346 // match: (TSTshiftRL (MOVDconst [c]) x [d]) 30347 // cond: 30348 // result: (TSTconst [c] (SRLconst <x.Type> x [d])) 30349 for { 30350 d := v.AuxInt 30351 _ = v.Args[1] 30352 v_0 := v.Args[0] 30353 if v_0.Op != OpARM64MOVDconst { 30354 
break 30355 } 30356 c := v_0.AuxInt 30357 x := v.Args[1] 30358 v.reset(OpARM64TSTconst) 30359 v.AuxInt = c 30360 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 30361 v0.AuxInt = d 30362 v0.AddArg(x) 30363 v.AddArg(v0) 30364 return true 30365 } 30366 // match: (TSTshiftRL x (MOVDconst [c]) [d]) 30367 // cond: 30368 // result: (TSTconst x [int64(uint64(c)>>uint64(d))]) 30369 for { 30370 d := v.AuxInt 30371 _ = v.Args[1] 30372 x := v.Args[0] 30373 v_1 := v.Args[1] 30374 if v_1.Op != OpARM64MOVDconst { 30375 break 30376 } 30377 c := v_1.AuxInt 30378 v.reset(OpARM64TSTconst) 30379 v.AuxInt = int64(uint64(c) >> uint64(d)) 30380 v.AddArg(x) 30381 return true 30382 } 30383 return false 30384 } 30385 func rewriteValueARM64_OpARM64UBFIZ_0(v *Value) bool { 30386 // match: (UBFIZ [bfc] (SLLconst [sc] x)) 30387 // cond: sc < getARM64BFwidth(bfc) 30388 // result: (UBFIZ [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc)] x) 30389 for { 30390 bfc := v.AuxInt 30391 v_0 := v.Args[0] 30392 if v_0.Op != OpARM64SLLconst { 30393 break 30394 } 30395 sc := v_0.AuxInt 30396 x := v_0.Args[0] 30397 if !(sc < getARM64BFwidth(bfc)) { 30398 break 30399 } 30400 v.reset(OpARM64UBFIZ) 30401 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)-sc) 30402 v.AddArg(x) 30403 return true 30404 } 30405 return false 30406 } 30407 func rewriteValueARM64_OpARM64UBFX_0(v *Value) bool { 30408 // match: (UBFX [bfc] (SRLconst [sc] x)) 30409 // cond: sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64 30410 // result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc))] x) 30411 for { 30412 bfc := v.AuxInt 30413 v_0 := v.Args[0] 30414 if v_0.Op != OpARM64SRLconst { 30415 break 30416 } 30417 sc := v_0.AuxInt 30418 x := v_0.Args[0] 30419 if !(sc+getARM64BFwidth(bfc)+getARM64BFlsb(bfc) < 64) { 30420 break 30421 } 30422 v.reset(OpARM64UBFX) 30423 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)+sc, getARM64BFwidth(bfc)) 30424 v.AddArg(x) 30425 return true 30426 } 30427 // match: 
(UBFX [bfc] (SLLconst [sc] x)) 30428 // cond: sc == getARM64BFlsb(bfc) 30429 // result: (ANDconst [1<<uint(getARM64BFwidth(bfc))-1] x) 30430 for { 30431 bfc := v.AuxInt 30432 v_0 := v.Args[0] 30433 if v_0.Op != OpARM64SLLconst { 30434 break 30435 } 30436 sc := v_0.AuxInt 30437 x := v_0.Args[0] 30438 if !(sc == getARM64BFlsb(bfc)) { 30439 break 30440 } 30441 v.reset(OpARM64ANDconst) 30442 v.AuxInt = 1<<uint(getARM64BFwidth(bfc)) - 1 30443 v.AddArg(x) 30444 return true 30445 } 30446 // match: (UBFX [bfc] (SLLconst [sc] x)) 30447 // cond: sc < getARM64BFlsb(bfc) 30448 // result: (UBFX [arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc))] x) 30449 for { 30450 bfc := v.AuxInt 30451 v_0 := v.Args[0] 30452 if v_0.Op != OpARM64SLLconst { 30453 break 30454 } 30455 sc := v_0.AuxInt 30456 x := v_0.Args[0] 30457 if !(sc < getARM64BFlsb(bfc)) { 30458 break 30459 } 30460 v.reset(OpARM64UBFX) 30461 v.AuxInt = arm64BFAuxInt(getARM64BFlsb(bfc)-sc, getARM64BFwidth(bfc)) 30462 v.AddArg(x) 30463 return true 30464 } 30465 // match: (UBFX [bfc] (SLLconst [sc] x)) 30466 // cond: sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc) 30467 // result: (UBFIZ [arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc)] x) 30468 for { 30469 bfc := v.AuxInt 30470 v_0 := v.Args[0] 30471 if v_0.Op != OpARM64SLLconst { 30472 break 30473 } 30474 sc := v_0.AuxInt 30475 x := v_0.Args[0] 30476 if !(sc > getARM64BFlsb(bfc) && sc < getARM64BFlsb(bfc)+getARM64BFwidth(bfc)) { 30477 break 30478 } 30479 v.reset(OpARM64UBFIZ) 30480 v.AuxInt = arm64BFAuxInt(sc-getARM64BFlsb(bfc), getARM64BFlsb(bfc)+getARM64BFwidth(bfc)-sc) 30481 v.AddArg(x) 30482 return true 30483 } 30484 return false 30485 } 30486 func rewriteValueARM64_OpARM64UDIV_0(v *Value) bool { 30487 // match: (UDIV x (MOVDconst [1])) 30488 // cond: 30489 // result: x 30490 for { 30491 _ = v.Args[1] 30492 x := v.Args[0] 30493 v_1 := v.Args[1] 30494 if v_1.Op != OpARM64MOVDconst { 30495 break 30496 } 30497 
if v_1.AuxInt != 1 { 30498 break 30499 } 30500 v.reset(OpCopy) 30501 v.Type = x.Type 30502 v.AddArg(x) 30503 return true 30504 } 30505 // match: (UDIV x (MOVDconst [c])) 30506 // cond: isPowerOfTwo(c) 30507 // result: (SRLconst [log2(c)] x) 30508 for { 30509 _ = v.Args[1] 30510 x := v.Args[0] 30511 v_1 := v.Args[1] 30512 if v_1.Op != OpARM64MOVDconst { 30513 break 30514 } 30515 c := v_1.AuxInt 30516 if !(isPowerOfTwo(c)) { 30517 break 30518 } 30519 v.reset(OpARM64SRLconst) 30520 v.AuxInt = log2(c) 30521 v.AddArg(x) 30522 return true 30523 } 30524 // match: (UDIV (MOVDconst [c]) (MOVDconst [d])) 30525 // cond: 30526 // result: (MOVDconst [int64(uint64(c)/uint64(d))]) 30527 for { 30528 _ = v.Args[1] 30529 v_0 := v.Args[0] 30530 if v_0.Op != OpARM64MOVDconst { 30531 break 30532 } 30533 c := v_0.AuxInt 30534 v_1 := v.Args[1] 30535 if v_1.Op != OpARM64MOVDconst { 30536 break 30537 } 30538 d := v_1.AuxInt 30539 v.reset(OpARM64MOVDconst) 30540 v.AuxInt = int64(uint64(c) / uint64(d)) 30541 return true 30542 } 30543 return false 30544 } 30545 func rewriteValueARM64_OpARM64UDIVW_0(v *Value) bool { 30546 // match: (UDIVW x (MOVDconst [c])) 30547 // cond: uint32(c)==1 30548 // result: x 30549 for { 30550 _ = v.Args[1] 30551 x := v.Args[0] 30552 v_1 := v.Args[1] 30553 if v_1.Op != OpARM64MOVDconst { 30554 break 30555 } 30556 c := v_1.AuxInt 30557 if !(uint32(c) == 1) { 30558 break 30559 } 30560 v.reset(OpCopy) 30561 v.Type = x.Type 30562 v.AddArg(x) 30563 return true 30564 } 30565 // match: (UDIVW x (MOVDconst [c])) 30566 // cond: isPowerOfTwo(c) && is32Bit(c) 30567 // result: (SRLconst [log2(c)] x) 30568 for { 30569 _ = v.Args[1] 30570 x := v.Args[0] 30571 v_1 := v.Args[1] 30572 if v_1.Op != OpARM64MOVDconst { 30573 break 30574 } 30575 c := v_1.AuxInt 30576 if !(isPowerOfTwo(c) && is32Bit(c)) { 30577 break 30578 } 30579 v.reset(OpARM64SRLconst) 30580 v.AuxInt = log2(c) 30581 v.AddArg(x) 30582 return true 30583 } 30584 // match: (UDIVW (MOVDconst [c]) (MOVDconst [d])) 30585 // 
cond: 30586 // result: (MOVDconst [int64(uint32(c)/uint32(d))]) 30587 for { 30588 _ = v.Args[1] 30589 v_0 := v.Args[0] 30590 if v_0.Op != OpARM64MOVDconst { 30591 break 30592 } 30593 c := v_0.AuxInt 30594 v_1 := v.Args[1] 30595 if v_1.Op != OpARM64MOVDconst { 30596 break 30597 } 30598 d := v_1.AuxInt 30599 v.reset(OpARM64MOVDconst) 30600 v.AuxInt = int64(uint32(c) / uint32(d)) 30601 return true 30602 } 30603 return false 30604 } 30605 func rewriteValueARM64_OpARM64UMOD_0(v *Value) bool { 30606 // match: (UMOD _ (MOVDconst [1])) 30607 // cond: 30608 // result: (MOVDconst [0]) 30609 for { 30610 _ = v.Args[1] 30611 v_1 := v.Args[1] 30612 if v_1.Op != OpARM64MOVDconst { 30613 break 30614 } 30615 if v_1.AuxInt != 1 { 30616 break 30617 } 30618 v.reset(OpARM64MOVDconst) 30619 v.AuxInt = 0 30620 return true 30621 } 30622 // match: (UMOD x (MOVDconst [c])) 30623 // cond: isPowerOfTwo(c) 30624 // result: (ANDconst [c-1] x) 30625 for { 30626 _ = v.Args[1] 30627 x := v.Args[0] 30628 v_1 := v.Args[1] 30629 if v_1.Op != OpARM64MOVDconst { 30630 break 30631 } 30632 c := v_1.AuxInt 30633 if !(isPowerOfTwo(c)) { 30634 break 30635 } 30636 v.reset(OpARM64ANDconst) 30637 v.AuxInt = c - 1 30638 v.AddArg(x) 30639 return true 30640 } 30641 // match: (UMOD (MOVDconst [c]) (MOVDconst [d])) 30642 // cond: 30643 // result: (MOVDconst [int64(uint64(c)%uint64(d))]) 30644 for { 30645 _ = v.Args[1] 30646 v_0 := v.Args[0] 30647 if v_0.Op != OpARM64MOVDconst { 30648 break 30649 } 30650 c := v_0.AuxInt 30651 v_1 := v.Args[1] 30652 if v_1.Op != OpARM64MOVDconst { 30653 break 30654 } 30655 d := v_1.AuxInt 30656 v.reset(OpARM64MOVDconst) 30657 v.AuxInt = int64(uint64(c) % uint64(d)) 30658 return true 30659 } 30660 return false 30661 } 30662 func rewriteValueARM64_OpARM64UMODW_0(v *Value) bool { 30663 // match: (UMODW _ (MOVDconst [c])) 30664 // cond: uint32(c)==1 30665 // result: (MOVDconst [0]) 30666 for { 30667 _ = v.Args[1] 30668 v_1 := v.Args[1] 30669 if v_1.Op != OpARM64MOVDconst { 30670 break 
30671 } 30672 c := v_1.AuxInt 30673 if !(uint32(c) == 1) { 30674 break 30675 } 30676 v.reset(OpARM64MOVDconst) 30677 v.AuxInt = 0 30678 return true 30679 } 30680 // match: (UMODW x (MOVDconst [c])) 30681 // cond: isPowerOfTwo(c) && is32Bit(c) 30682 // result: (ANDconst [c-1] x) 30683 for { 30684 _ = v.Args[1] 30685 x := v.Args[0] 30686 v_1 := v.Args[1] 30687 if v_1.Op != OpARM64MOVDconst { 30688 break 30689 } 30690 c := v_1.AuxInt 30691 if !(isPowerOfTwo(c) && is32Bit(c)) { 30692 break 30693 } 30694 v.reset(OpARM64ANDconst) 30695 v.AuxInt = c - 1 30696 v.AddArg(x) 30697 return true 30698 } 30699 // match: (UMODW (MOVDconst [c]) (MOVDconst [d])) 30700 // cond: 30701 // result: (MOVDconst [int64(uint32(c)%uint32(d))]) 30702 for { 30703 _ = v.Args[1] 30704 v_0 := v.Args[0] 30705 if v_0.Op != OpARM64MOVDconst { 30706 break 30707 } 30708 c := v_0.AuxInt 30709 v_1 := v.Args[1] 30710 if v_1.Op != OpARM64MOVDconst { 30711 break 30712 } 30713 d := v_1.AuxInt 30714 v.reset(OpARM64MOVDconst) 30715 v.AuxInt = int64(uint32(c) % uint32(d)) 30716 return true 30717 } 30718 return false 30719 } 30720 func rewriteValueARM64_OpARM64XOR_0(v *Value) bool { 30721 // match: (XOR x (MOVDconst [c])) 30722 // cond: 30723 // result: (XORconst [c] x) 30724 for { 30725 _ = v.Args[1] 30726 x := v.Args[0] 30727 v_1 := v.Args[1] 30728 if v_1.Op != OpARM64MOVDconst { 30729 break 30730 } 30731 c := v_1.AuxInt 30732 v.reset(OpARM64XORconst) 30733 v.AuxInt = c 30734 v.AddArg(x) 30735 return true 30736 } 30737 // match: (XOR (MOVDconst [c]) x) 30738 // cond: 30739 // result: (XORconst [c] x) 30740 for { 30741 _ = v.Args[1] 30742 v_0 := v.Args[0] 30743 if v_0.Op != OpARM64MOVDconst { 30744 break 30745 } 30746 c := v_0.AuxInt 30747 x := v.Args[1] 30748 v.reset(OpARM64XORconst) 30749 v.AuxInt = c 30750 v.AddArg(x) 30751 return true 30752 } 30753 // match: (XOR x x) 30754 // cond: 30755 // result: (MOVDconst [0]) 30756 for { 30757 _ = v.Args[1] 30758 x := v.Args[0] 30759 if x != v.Args[1] { 30760 break 
30761 } 30762 v.reset(OpARM64MOVDconst) 30763 v.AuxInt = 0 30764 return true 30765 } 30766 // match: (XOR x (MVN y)) 30767 // cond: 30768 // result: (EON x y) 30769 for { 30770 _ = v.Args[1] 30771 x := v.Args[0] 30772 v_1 := v.Args[1] 30773 if v_1.Op != OpARM64MVN { 30774 break 30775 } 30776 y := v_1.Args[0] 30777 v.reset(OpARM64EON) 30778 v.AddArg(x) 30779 v.AddArg(y) 30780 return true 30781 } 30782 // match: (XOR (MVN y) x) 30783 // cond: 30784 // result: (EON x y) 30785 for { 30786 _ = v.Args[1] 30787 v_0 := v.Args[0] 30788 if v_0.Op != OpARM64MVN { 30789 break 30790 } 30791 y := v_0.Args[0] 30792 x := v.Args[1] 30793 v.reset(OpARM64EON) 30794 v.AddArg(x) 30795 v.AddArg(y) 30796 return true 30797 } 30798 // match: (XOR x0 x1:(SLLconst [c] y)) 30799 // cond: clobberIfDead(x1) 30800 // result: (XORshiftLL x0 y [c]) 30801 for { 30802 _ = v.Args[1] 30803 x0 := v.Args[0] 30804 x1 := v.Args[1] 30805 if x1.Op != OpARM64SLLconst { 30806 break 30807 } 30808 c := x1.AuxInt 30809 y := x1.Args[0] 30810 if !(clobberIfDead(x1)) { 30811 break 30812 } 30813 v.reset(OpARM64XORshiftLL) 30814 v.AuxInt = c 30815 v.AddArg(x0) 30816 v.AddArg(y) 30817 return true 30818 } 30819 // match: (XOR x1:(SLLconst [c] y) x0) 30820 // cond: clobberIfDead(x1) 30821 // result: (XORshiftLL x0 y [c]) 30822 for { 30823 _ = v.Args[1] 30824 x1 := v.Args[0] 30825 if x1.Op != OpARM64SLLconst { 30826 break 30827 } 30828 c := x1.AuxInt 30829 y := x1.Args[0] 30830 x0 := v.Args[1] 30831 if !(clobberIfDead(x1)) { 30832 break 30833 } 30834 v.reset(OpARM64XORshiftLL) 30835 v.AuxInt = c 30836 v.AddArg(x0) 30837 v.AddArg(y) 30838 return true 30839 } 30840 // match: (XOR x0 x1:(SRLconst [c] y)) 30841 // cond: clobberIfDead(x1) 30842 // result: (XORshiftRL x0 y [c]) 30843 for { 30844 _ = v.Args[1] 30845 x0 := v.Args[0] 30846 x1 := v.Args[1] 30847 if x1.Op != OpARM64SRLconst { 30848 break 30849 } 30850 c := x1.AuxInt 30851 y := x1.Args[0] 30852 if !(clobberIfDead(x1)) { 30853 break 30854 } 30855 
v.reset(OpARM64XORshiftRL) 30856 v.AuxInt = c 30857 v.AddArg(x0) 30858 v.AddArg(y) 30859 return true 30860 } 30861 // match: (XOR x1:(SRLconst [c] y) x0) 30862 // cond: clobberIfDead(x1) 30863 // result: (XORshiftRL x0 y [c]) 30864 for { 30865 _ = v.Args[1] 30866 x1 := v.Args[0] 30867 if x1.Op != OpARM64SRLconst { 30868 break 30869 } 30870 c := x1.AuxInt 30871 y := x1.Args[0] 30872 x0 := v.Args[1] 30873 if !(clobberIfDead(x1)) { 30874 break 30875 } 30876 v.reset(OpARM64XORshiftRL) 30877 v.AuxInt = c 30878 v.AddArg(x0) 30879 v.AddArg(y) 30880 return true 30881 } 30882 // match: (XOR x0 x1:(SRAconst [c] y)) 30883 // cond: clobberIfDead(x1) 30884 // result: (XORshiftRA x0 y [c]) 30885 for { 30886 _ = v.Args[1] 30887 x0 := v.Args[0] 30888 x1 := v.Args[1] 30889 if x1.Op != OpARM64SRAconst { 30890 break 30891 } 30892 c := x1.AuxInt 30893 y := x1.Args[0] 30894 if !(clobberIfDead(x1)) { 30895 break 30896 } 30897 v.reset(OpARM64XORshiftRA) 30898 v.AuxInt = c 30899 v.AddArg(x0) 30900 v.AddArg(y) 30901 return true 30902 } 30903 return false 30904 } 30905 func rewriteValueARM64_OpARM64XOR_10(v *Value) bool { 30906 b := v.Block 30907 _ = b 30908 typ := &b.Func.Config.Types 30909 _ = typ 30910 // match: (XOR x1:(SRAconst [c] y) x0) 30911 // cond: clobberIfDead(x1) 30912 // result: (XORshiftRA x0 y [c]) 30913 for { 30914 _ = v.Args[1] 30915 x1 := v.Args[0] 30916 if x1.Op != OpARM64SRAconst { 30917 break 30918 } 30919 c := x1.AuxInt 30920 y := x1.Args[0] 30921 x0 := v.Args[1] 30922 if !(clobberIfDead(x1)) { 30923 break 30924 } 30925 v.reset(OpARM64XORshiftRA) 30926 v.AuxInt = c 30927 v.AddArg(x0) 30928 v.AddArg(y) 30929 return true 30930 } 30931 // match: (XOR (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 30932 // cond: cc.(Op) == OpARM64LessThanU 30933 // result: (ROR x (NEG <t> y)) 30934 for { 30935 _ = v.Args[1] 30936 v_0 := v.Args[0] 
30937 if v_0.Op != OpARM64SLL { 30938 break 30939 } 30940 _ = v_0.Args[1] 30941 x := v_0.Args[0] 30942 v_0_1 := v_0.Args[1] 30943 if v_0_1.Op != OpARM64ANDconst { 30944 break 30945 } 30946 t := v_0_1.Type 30947 if v_0_1.AuxInt != 63 { 30948 break 30949 } 30950 y := v_0_1.Args[0] 30951 v_1 := v.Args[1] 30952 if v_1.Op != OpARM64CSEL0 { 30953 break 30954 } 30955 if v_1.Type != typ.UInt64 { 30956 break 30957 } 30958 cc := v_1.Aux 30959 _ = v_1.Args[1] 30960 v_1_0 := v_1.Args[0] 30961 if v_1_0.Op != OpARM64SRL { 30962 break 30963 } 30964 if v_1_0.Type != typ.UInt64 { 30965 break 30966 } 30967 _ = v_1_0.Args[1] 30968 if x != v_1_0.Args[0] { 30969 break 30970 } 30971 v_1_0_1 := v_1_0.Args[1] 30972 if v_1_0_1.Op != OpARM64SUB { 30973 break 30974 } 30975 if v_1_0_1.Type != t { 30976 break 30977 } 30978 _ = v_1_0_1.Args[1] 30979 v_1_0_1_0 := v_1_0_1.Args[0] 30980 if v_1_0_1_0.Op != OpARM64MOVDconst { 30981 break 30982 } 30983 if v_1_0_1_0.AuxInt != 64 { 30984 break 30985 } 30986 v_1_0_1_1 := v_1_0_1.Args[1] 30987 if v_1_0_1_1.Op != OpARM64ANDconst { 30988 break 30989 } 30990 if v_1_0_1_1.Type != t { 30991 break 30992 } 30993 if v_1_0_1_1.AuxInt != 63 { 30994 break 30995 } 30996 if y != v_1_0_1_1.Args[0] { 30997 break 30998 } 30999 v_1_1 := v_1.Args[1] 31000 if v_1_1.Op != OpARM64CMPconst { 31001 break 31002 } 31003 if v_1_1.AuxInt != 64 { 31004 break 31005 } 31006 v_1_1_0 := v_1_1.Args[0] 31007 if v_1_1_0.Op != OpARM64SUB { 31008 break 31009 } 31010 if v_1_1_0.Type != t { 31011 break 31012 } 31013 _ = v_1_1_0.Args[1] 31014 v_1_1_0_0 := v_1_1_0.Args[0] 31015 if v_1_1_0_0.Op != OpARM64MOVDconst { 31016 break 31017 } 31018 if v_1_1_0_0.AuxInt != 64 { 31019 break 31020 } 31021 v_1_1_0_1 := v_1_1_0.Args[1] 31022 if v_1_1_0_1.Op != OpARM64ANDconst { 31023 break 31024 } 31025 if v_1_1_0_1.Type != t { 31026 break 31027 } 31028 if v_1_1_0_1.AuxInt != 63 { 31029 break 31030 } 31031 if y != v_1_1_0_1.Args[0] { 31032 break 31033 } 31034 if !(cc.(Op) == OpARM64LessThanU) { 31035 break 
31036 } 31037 v.reset(OpARM64ROR) 31038 v.AddArg(x) 31039 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31040 v0.AddArg(y) 31041 v.AddArg(v0) 31042 return true 31043 } 31044 // match: (XOR (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y))) 31045 // cond: cc.(Op) == OpARM64LessThanU 31046 // result: (ROR x (NEG <t> y)) 31047 for { 31048 _ = v.Args[1] 31049 v_0 := v.Args[0] 31050 if v_0.Op != OpARM64CSEL0 { 31051 break 31052 } 31053 if v_0.Type != typ.UInt64 { 31054 break 31055 } 31056 cc := v_0.Aux 31057 _ = v_0.Args[1] 31058 v_0_0 := v_0.Args[0] 31059 if v_0_0.Op != OpARM64SRL { 31060 break 31061 } 31062 if v_0_0.Type != typ.UInt64 { 31063 break 31064 } 31065 _ = v_0_0.Args[1] 31066 x := v_0_0.Args[0] 31067 v_0_0_1 := v_0_0.Args[1] 31068 if v_0_0_1.Op != OpARM64SUB { 31069 break 31070 } 31071 t := v_0_0_1.Type 31072 _ = v_0_0_1.Args[1] 31073 v_0_0_1_0 := v_0_0_1.Args[0] 31074 if v_0_0_1_0.Op != OpARM64MOVDconst { 31075 break 31076 } 31077 if v_0_0_1_0.AuxInt != 64 { 31078 break 31079 } 31080 v_0_0_1_1 := v_0_0_1.Args[1] 31081 if v_0_0_1_1.Op != OpARM64ANDconst { 31082 break 31083 } 31084 if v_0_0_1_1.Type != t { 31085 break 31086 } 31087 if v_0_0_1_1.AuxInt != 63 { 31088 break 31089 } 31090 y := v_0_0_1_1.Args[0] 31091 v_0_1 := v_0.Args[1] 31092 if v_0_1.Op != OpARM64CMPconst { 31093 break 31094 } 31095 if v_0_1.AuxInt != 64 { 31096 break 31097 } 31098 v_0_1_0 := v_0_1.Args[0] 31099 if v_0_1_0.Op != OpARM64SUB { 31100 break 31101 } 31102 if v_0_1_0.Type != t { 31103 break 31104 } 31105 _ = v_0_1_0.Args[1] 31106 v_0_1_0_0 := v_0_1_0.Args[0] 31107 if v_0_1_0_0.Op != OpARM64MOVDconst { 31108 break 31109 } 31110 if v_0_1_0_0.AuxInt != 64 { 31111 break 31112 } 31113 v_0_1_0_1 := v_0_1_0.Args[1] 31114 if v_0_1_0_1.Op != OpARM64ANDconst { 31115 break 31116 } 31117 if v_0_1_0_1.Type != t { 31118 break 31119 } 31120 if v_0_1_0_1.AuxInt != 63 
{ 31121 break 31122 } 31123 if y != v_0_1_0_1.Args[0] { 31124 break 31125 } 31126 v_1 := v.Args[1] 31127 if v_1.Op != OpARM64SLL { 31128 break 31129 } 31130 _ = v_1.Args[1] 31131 if x != v_1.Args[0] { 31132 break 31133 } 31134 v_1_1 := v_1.Args[1] 31135 if v_1_1.Op != OpARM64ANDconst { 31136 break 31137 } 31138 if v_1_1.Type != t { 31139 break 31140 } 31141 if v_1_1.AuxInt != 63 { 31142 break 31143 } 31144 if y != v_1_1.Args[0] { 31145 break 31146 } 31147 if !(cc.(Op) == OpARM64LessThanU) { 31148 break 31149 } 31150 v.reset(OpARM64ROR) 31151 v.AddArg(x) 31152 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31153 v0.AddArg(y) 31154 v.AddArg(v0) 31155 return true 31156 } 31157 // match: (XOR (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))))) 31158 // cond: cc.(Op) == OpARM64LessThanU 31159 // result: (ROR x y) 31160 for { 31161 _ = v.Args[1] 31162 v_0 := v.Args[0] 31163 if v_0.Op != OpARM64SRL { 31164 break 31165 } 31166 if v_0.Type != typ.UInt64 { 31167 break 31168 } 31169 _ = v_0.Args[1] 31170 x := v_0.Args[0] 31171 v_0_1 := v_0.Args[1] 31172 if v_0_1.Op != OpARM64ANDconst { 31173 break 31174 } 31175 t := v_0_1.Type 31176 if v_0_1.AuxInt != 63 { 31177 break 31178 } 31179 y := v_0_1.Args[0] 31180 v_1 := v.Args[1] 31181 if v_1.Op != OpARM64CSEL0 { 31182 break 31183 } 31184 if v_1.Type != typ.UInt64 { 31185 break 31186 } 31187 cc := v_1.Aux 31188 _ = v_1.Args[1] 31189 v_1_0 := v_1.Args[0] 31190 if v_1_0.Op != OpARM64SLL { 31191 break 31192 } 31193 _ = v_1_0.Args[1] 31194 if x != v_1_0.Args[0] { 31195 break 31196 } 31197 v_1_0_1 := v_1_0.Args[1] 31198 if v_1_0_1.Op != OpARM64SUB { 31199 break 31200 } 31201 if v_1_0_1.Type != t { 31202 break 31203 } 31204 _ = v_1_0_1.Args[1] 31205 v_1_0_1_0 := v_1_0_1.Args[0] 31206 if v_1_0_1_0.Op != OpARM64MOVDconst { 31207 break 31208 } 31209 if v_1_0_1_0.AuxInt != 64 { 31210 break 31211 } 31212 
v_1_0_1_1 := v_1_0_1.Args[1] 31213 if v_1_0_1_1.Op != OpARM64ANDconst { 31214 break 31215 } 31216 if v_1_0_1_1.Type != t { 31217 break 31218 } 31219 if v_1_0_1_1.AuxInt != 63 { 31220 break 31221 } 31222 if y != v_1_0_1_1.Args[0] { 31223 break 31224 } 31225 v_1_1 := v_1.Args[1] 31226 if v_1_1.Op != OpARM64CMPconst { 31227 break 31228 } 31229 if v_1_1.AuxInt != 64 { 31230 break 31231 } 31232 v_1_1_0 := v_1_1.Args[0] 31233 if v_1_1_0.Op != OpARM64SUB { 31234 break 31235 } 31236 if v_1_1_0.Type != t { 31237 break 31238 } 31239 _ = v_1_1_0.Args[1] 31240 v_1_1_0_0 := v_1_1_0.Args[0] 31241 if v_1_1_0_0.Op != OpARM64MOVDconst { 31242 break 31243 } 31244 if v_1_1_0_0.AuxInt != 64 { 31245 break 31246 } 31247 v_1_1_0_1 := v_1_1_0.Args[1] 31248 if v_1_1_0_1.Op != OpARM64ANDconst { 31249 break 31250 } 31251 if v_1_1_0_1.Type != t { 31252 break 31253 } 31254 if v_1_1_0_1.AuxInt != 63 { 31255 break 31256 } 31257 if y != v_1_1_0_1.Args[0] { 31258 break 31259 } 31260 if !(cc.(Op) == OpARM64LessThanU) { 31261 break 31262 } 31263 v.reset(OpARM64ROR) 31264 v.AddArg(x) 31265 v.AddArg(y) 31266 return true 31267 } 31268 // match: (XOR (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y))) 31269 // cond: cc.(Op) == OpARM64LessThanU 31270 // result: (ROR x y) 31271 for { 31272 _ = v.Args[1] 31273 v_0 := v.Args[0] 31274 if v_0.Op != OpARM64CSEL0 { 31275 break 31276 } 31277 if v_0.Type != typ.UInt64 { 31278 break 31279 } 31280 cc := v_0.Aux 31281 _ = v_0.Args[1] 31282 v_0_0 := v_0.Args[0] 31283 if v_0_0.Op != OpARM64SLL { 31284 break 31285 } 31286 _ = v_0_0.Args[1] 31287 x := v_0_0.Args[0] 31288 v_0_0_1 := v_0_0.Args[1] 31289 if v_0_0_1.Op != OpARM64SUB { 31290 break 31291 } 31292 t := v_0_0_1.Type 31293 _ = v_0_0_1.Args[1] 31294 v_0_0_1_0 := v_0_0_1.Args[0] 31295 if v_0_0_1_0.Op != OpARM64MOVDconst { 31296 break 31297 } 31298 if v_0_0_1_0.AuxInt != 64 { 
31299 break 31300 } 31301 v_0_0_1_1 := v_0_0_1.Args[1] 31302 if v_0_0_1_1.Op != OpARM64ANDconst { 31303 break 31304 } 31305 if v_0_0_1_1.Type != t { 31306 break 31307 } 31308 if v_0_0_1_1.AuxInt != 63 { 31309 break 31310 } 31311 y := v_0_0_1_1.Args[0] 31312 v_0_1 := v_0.Args[1] 31313 if v_0_1.Op != OpARM64CMPconst { 31314 break 31315 } 31316 if v_0_1.AuxInt != 64 { 31317 break 31318 } 31319 v_0_1_0 := v_0_1.Args[0] 31320 if v_0_1_0.Op != OpARM64SUB { 31321 break 31322 } 31323 if v_0_1_0.Type != t { 31324 break 31325 } 31326 _ = v_0_1_0.Args[1] 31327 v_0_1_0_0 := v_0_1_0.Args[0] 31328 if v_0_1_0_0.Op != OpARM64MOVDconst { 31329 break 31330 } 31331 if v_0_1_0_0.AuxInt != 64 { 31332 break 31333 } 31334 v_0_1_0_1 := v_0_1_0.Args[1] 31335 if v_0_1_0_1.Op != OpARM64ANDconst { 31336 break 31337 } 31338 if v_0_1_0_1.Type != t { 31339 break 31340 } 31341 if v_0_1_0_1.AuxInt != 63 { 31342 break 31343 } 31344 if y != v_0_1_0_1.Args[0] { 31345 break 31346 } 31347 v_1 := v.Args[1] 31348 if v_1.Op != OpARM64SRL { 31349 break 31350 } 31351 if v_1.Type != typ.UInt64 { 31352 break 31353 } 31354 _ = v_1.Args[1] 31355 if x != v_1.Args[0] { 31356 break 31357 } 31358 v_1_1 := v_1.Args[1] 31359 if v_1_1.Op != OpARM64ANDconst { 31360 break 31361 } 31362 if v_1_1.Type != t { 31363 break 31364 } 31365 if v_1_1.AuxInt != 63 { 31366 break 31367 } 31368 if y != v_1_1.Args[0] { 31369 break 31370 } 31371 if !(cc.(Op) == OpARM64LessThanU) { 31372 break 31373 } 31374 v.reset(OpARM64ROR) 31375 v.AddArg(x) 31376 v.AddArg(y) 31377 return true 31378 } 31379 // match: (XOR (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 31380 // cond: cc.(Op) == OpARM64LessThanU 31381 // result: (RORW x (NEG <t> y)) 31382 for { 31383 _ = v.Args[1] 31384 v_0 := v.Args[0] 31385 if v_0.Op != OpARM64SLL { 31386 break 31387 } 31388 _ = v_0.Args[1] 31389 x := v_0.Args[0] 
31390 v_0_1 := v_0.Args[1] 31391 if v_0_1.Op != OpARM64ANDconst { 31392 break 31393 } 31394 t := v_0_1.Type 31395 if v_0_1.AuxInt != 31 { 31396 break 31397 } 31398 y := v_0_1.Args[0] 31399 v_1 := v.Args[1] 31400 if v_1.Op != OpARM64CSEL0 { 31401 break 31402 } 31403 if v_1.Type != typ.UInt32 { 31404 break 31405 } 31406 cc := v_1.Aux 31407 _ = v_1.Args[1] 31408 v_1_0 := v_1.Args[0] 31409 if v_1_0.Op != OpARM64SRL { 31410 break 31411 } 31412 if v_1_0.Type != typ.UInt32 { 31413 break 31414 } 31415 _ = v_1_0.Args[1] 31416 v_1_0_0 := v_1_0.Args[0] 31417 if v_1_0_0.Op != OpARM64MOVWUreg { 31418 break 31419 } 31420 if x != v_1_0_0.Args[0] { 31421 break 31422 } 31423 v_1_0_1 := v_1_0.Args[1] 31424 if v_1_0_1.Op != OpARM64SUB { 31425 break 31426 } 31427 if v_1_0_1.Type != t { 31428 break 31429 } 31430 _ = v_1_0_1.Args[1] 31431 v_1_0_1_0 := v_1_0_1.Args[0] 31432 if v_1_0_1_0.Op != OpARM64MOVDconst { 31433 break 31434 } 31435 if v_1_0_1_0.AuxInt != 32 { 31436 break 31437 } 31438 v_1_0_1_1 := v_1_0_1.Args[1] 31439 if v_1_0_1_1.Op != OpARM64ANDconst { 31440 break 31441 } 31442 if v_1_0_1_1.Type != t { 31443 break 31444 } 31445 if v_1_0_1_1.AuxInt != 31 { 31446 break 31447 } 31448 if y != v_1_0_1_1.Args[0] { 31449 break 31450 } 31451 v_1_1 := v_1.Args[1] 31452 if v_1_1.Op != OpARM64CMPconst { 31453 break 31454 } 31455 if v_1_1.AuxInt != 64 { 31456 break 31457 } 31458 v_1_1_0 := v_1_1.Args[0] 31459 if v_1_1_0.Op != OpARM64SUB { 31460 break 31461 } 31462 if v_1_1_0.Type != t { 31463 break 31464 } 31465 _ = v_1_1_0.Args[1] 31466 v_1_1_0_0 := v_1_1_0.Args[0] 31467 if v_1_1_0_0.Op != OpARM64MOVDconst { 31468 break 31469 } 31470 if v_1_1_0_0.AuxInt != 32 { 31471 break 31472 } 31473 v_1_1_0_1 := v_1_1_0.Args[1] 31474 if v_1_1_0_1.Op != OpARM64ANDconst { 31475 break 31476 } 31477 if v_1_1_0_1.Type != t { 31478 break 31479 } 31480 if v_1_1_0_1.AuxInt != 31 { 31481 break 31482 } 31483 if y != v_1_1_0_1.Args[0] { 31484 break 31485 } 31486 if !(cc.(Op) == OpARM64LessThanU) { 31487 break 
31488 } 31489 v.reset(OpARM64RORW) 31490 v.AddArg(x) 31491 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31492 v0.AddArg(y) 31493 v.AddArg(v0) 31494 return true 31495 } 31496 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y))) 31497 // cond: cc.(Op) == OpARM64LessThanU 31498 // result: (RORW x (NEG <t> y)) 31499 for { 31500 _ = v.Args[1] 31501 v_0 := v.Args[0] 31502 if v_0.Op != OpARM64CSEL0 { 31503 break 31504 } 31505 if v_0.Type != typ.UInt32 { 31506 break 31507 } 31508 cc := v_0.Aux 31509 _ = v_0.Args[1] 31510 v_0_0 := v_0.Args[0] 31511 if v_0_0.Op != OpARM64SRL { 31512 break 31513 } 31514 if v_0_0.Type != typ.UInt32 { 31515 break 31516 } 31517 _ = v_0_0.Args[1] 31518 v_0_0_0 := v_0_0.Args[0] 31519 if v_0_0_0.Op != OpARM64MOVWUreg { 31520 break 31521 } 31522 x := v_0_0_0.Args[0] 31523 v_0_0_1 := v_0_0.Args[1] 31524 if v_0_0_1.Op != OpARM64SUB { 31525 break 31526 } 31527 t := v_0_0_1.Type 31528 _ = v_0_0_1.Args[1] 31529 v_0_0_1_0 := v_0_0_1.Args[0] 31530 if v_0_0_1_0.Op != OpARM64MOVDconst { 31531 break 31532 } 31533 if v_0_0_1_0.AuxInt != 32 { 31534 break 31535 } 31536 v_0_0_1_1 := v_0_0_1.Args[1] 31537 if v_0_0_1_1.Op != OpARM64ANDconst { 31538 break 31539 } 31540 if v_0_0_1_1.Type != t { 31541 break 31542 } 31543 if v_0_0_1_1.AuxInt != 31 { 31544 break 31545 } 31546 y := v_0_0_1_1.Args[0] 31547 v_0_1 := v_0.Args[1] 31548 if v_0_1.Op != OpARM64CMPconst { 31549 break 31550 } 31551 if v_0_1.AuxInt != 64 { 31552 break 31553 } 31554 v_0_1_0 := v_0_1.Args[0] 31555 if v_0_1_0.Op != OpARM64SUB { 31556 break 31557 } 31558 if v_0_1_0.Type != t { 31559 break 31560 } 31561 _ = v_0_1_0.Args[1] 31562 v_0_1_0_0 := v_0_1_0.Args[0] 31563 if v_0_1_0_0.Op != OpARM64MOVDconst { 31564 break 31565 } 31566 if v_0_1_0_0.AuxInt != 32 { 31567 break 31568 } 31569 v_0_1_0_1 := v_0_1_0.Args[1] 31570 if v_0_1_0_1.Op != 
OpARM64ANDconst { 31571 break 31572 } 31573 if v_0_1_0_1.Type != t { 31574 break 31575 } 31576 if v_0_1_0_1.AuxInt != 31 { 31577 break 31578 } 31579 if y != v_0_1_0_1.Args[0] { 31580 break 31581 } 31582 v_1 := v.Args[1] 31583 if v_1.Op != OpARM64SLL { 31584 break 31585 } 31586 _ = v_1.Args[1] 31587 if x != v_1.Args[0] { 31588 break 31589 } 31590 v_1_1 := v_1.Args[1] 31591 if v_1_1.Op != OpARM64ANDconst { 31592 break 31593 } 31594 if v_1_1.Type != t { 31595 break 31596 } 31597 if v_1_1.AuxInt != 31 { 31598 break 31599 } 31600 if y != v_1_1.Args[0] { 31601 break 31602 } 31603 if !(cc.(Op) == OpARM64LessThanU) { 31604 break 31605 } 31606 v.reset(OpARM64RORW) 31607 v.AddArg(x) 31608 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 31609 v0.AddArg(y) 31610 v.AddArg(v0) 31611 return true 31612 } 31613 // match: (XOR (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))))) 31614 // cond: cc.(Op) == OpARM64LessThanU 31615 // result: (RORW x y) 31616 for { 31617 _ = v.Args[1] 31618 v_0 := v.Args[0] 31619 if v_0.Op != OpARM64SRL { 31620 break 31621 } 31622 if v_0.Type != typ.UInt32 { 31623 break 31624 } 31625 _ = v_0.Args[1] 31626 v_0_0 := v_0.Args[0] 31627 if v_0_0.Op != OpARM64MOVWUreg { 31628 break 31629 } 31630 x := v_0_0.Args[0] 31631 v_0_1 := v_0.Args[1] 31632 if v_0_1.Op != OpARM64ANDconst { 31633 break 31634 } 31635 t := v_0_1.Type 31636 if v_0_1.AuxInt != 31 { 31637 break 31638 } 31639 y := v_0_1.Args[0] 31640 v_1 := v.Args[1] 31641 if v_1.Op != OpARM64CSEL0 { 31642 break 31643 } 31644 if v_1.Type != typ.UInt32 { 31645 break 31646 } 31647 cc := v_1.Aux 31648 _ = v_1.Args[1] 31649 v_1_0 := v_1.Args[0] 31650 if v_1_0.Op != OpARM64SLL { 31651 break 31652 } 31653 _ = v_1_0.Args[1] 31654 if x != v_1_0.Args[0] { 31655 break 31656 } 31657 v_1_0_1 := v_1_0.Args[1] 31658 if v_1_0_1.Op != OpARM64SUB { 31659 break 31660 } 31661 if 
v_1_0_1.Type != t { 31662 break 31663 } 31664 _ = v_1_0_1.Args[1] 31665 v_1_0_1_0 := v_1_0_1.Args[0] 31666 if v_1_0_1_0.Op != OpARM64MOVDconst { 31667 break 31668 } 31669 if v_1_0_1_0.AuxInt != 32 { 31670 break 31671 } 31672 v_1_0_1_1 := v_1_0_1.Args[1] 31673 if v_1_0_1_1.Op != OpARM64ANDconst { 31674 break 31675 } 31676 if v_1_0_1_1.Type != t { 31677 break 31678 } 31679 if v_1_0_1_1.AuxInt != 31 { 31680 break 31681 } 31682 if y != v_1_0_1_1.Args[0] { 31683 break 31684 } 31685 v_1_1 := v_1.Args[1] 31686 if v_1_1.Op != OpARM64CMPconst { 31687 break 31688 } 31689 if v_1_1.AuxInt != 64 { 31690 break 31691 } 31692 v_1_1_0 := v_1_1.Args[0] 31693 if v_1_1_0.Op != OpARM64SUB { 31694 break 31695 } 31696 if v_1_1_0.Type != t { 31697 break 31698 } 31699 _ = v_1_1_0.Args[1] 31700 v_1_1_0_0 := v_1_1_0.Args[0] 31701 if v_1_1_0_0.Op != OpARM64MOVDconst { 31702 break 31703 } 31704 if v_1_1_0_0.AuxInt != 32 { 31705 break 31706 } 31707 v_1_1_0_1 := v_1_1_0.Args[1] 31708 if v_1_1_0_1.Op != OpARM64ANDconst { 31709 break 31710 } 31711 if v_1_1_0_1.Type != t { 31712 break 31713 } 31714 if v_1_1_0_1.AuxInt != 31 { 31715 break 31716 } 31717 if y != v_1_1_0_1.Args[0] { 31718 break 31719 } 31720 if !(cc.(Op) == OpARM64LessThanU) { 31721 break 31722 } 31723 v.reset(OpARM64RORW) 31724 v.AddArg(x) 31725 v.AddArg(y) 31726 return true 31727 } 31728 // match: (XOR (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y))) 31729 // cond: cc.(Op) == OpARM64LessThanU 31730 // result: (RORW x y) 31731 for { 31732 _ = v.Args[1] 31733 v_0 := v.Args[0] 31734 if v_0.Op != OpARM64CSEL0 { 31735 break 31736 } 31737 if v_0.Type != typ.UInt32 { 31738 break 31739 } 31740 cc := v_0.Aux 31741 _ = v_0.Args[1] 31742 v_0_0 := v_0.Args[0] 31743 if v_0_0.Op != OpARM64SLL { 31744 break 31745 } 31746 _ = v_0_0.Args[1] 31747 x := v_0_0.Args[0] 31748 v_0_0_1 := v_0_0.Args[1] 
31749 if v_0_0_1.Op != OpARM64SUB { 31750 break 31751 } 31752 t := v_0_0_1.Type 31753 _ = v_0_0_1.Args[1] 31754 v_0_0_1_0 := v_0_0_1.Args[0] 31755 if v_0_0_1_0.Op != OpARM64MOVDconst { 31756 break 31757 } 31758 if v_0_0_1_0.AuxInt != 32 { 31759 break 31760 } 31761 v_0_0_1_1 := v_0_0_1.Args[1] 31762 if v_0_0_1_1.Op != OpARM64ANDconst { 31763 break 31764 } 31765 if v_0_0_1_1.Type != t { 31766 break 31767 } 31768 if v_0_0_1_1.AuxInt != 31 { 31769 break 31770 } 31771 y := v_0_0_1_1.Args[0] 31772 v_0_1 := v_0.Args[1] 31773 if v_0_1.Op != OpARM64CMPconst { 31774 break 31775 } 31776 if v_0_1.AuxInt != 64 { 31777 break 31778 } 31779 v_0_1_0 := v_0_1.Args[0] 31780 if v_0_1_0.Op != OpARM64SUB { 31781 break 31782 } 31783 if v_0_1_0.Type != t { 31784 break 31785 } 31786 _ = v_0_1_0.Args[1] 31787 v_0_1_0_0 := v_0_1_0.Args[0] 31788 if v_0_1_0_0.Op != OpARM64MOVDconst { 31789 break 31790 } 31791 if v_0_1_0_0.AuxInt != 32 { 31792 break 31793 } 31794 v_0_1_0_1 := v_0_1_0.Args[1] 31795 if v_0_1_0_1.Op != OpARM64ANDconst { 31796 break 31797 } 31798 if v_0_1_0_1.Type != t { 31799 break 31800 } 31801 if v_0_1_0_1.AuxInt != 31 { 31802 break 31803 } 31804 if y != v_0_1_0_1.Args[0] { 31805 break 31806 } 31807 v_1 := v.Args[1] 31808 if v_1.Op != OpARM64SRL { 31809 break 31810 } 31811 if v_1.Type != typ.UInt32 { 31812 break 31813 } 31814 _ = v_1.Args[1] 31815 v_1_0 := v_1.Args[0] 31816 if v_1_0.Op != OpARM64MOVWUreg { 31817 break 31818 } 31819 if x != v_1_0.Args[0] { 31820 break 31821 } 31822 v_1_1 := v_1.Args[1] 31823 if v_1_1.Op != OpARM64ANDconst { 31824 break 31825 } 31826 if v_1_1.Type != t { 31827 break 31828 } 31829 if v_1_1.AuxInt != 31 { 31830 break 31831 } 31832 if y != v_1_1.Args[0] { 31833 break 31834 } 31835 if !(cc.(Op) == OpARM64LessThanU) { 31836 break 31837 } 31838 v.reset(OpARM64RORW) 31839 v.AddArg(x) 31840 v.AddArg(y) 31841 return true 31842 } 31843 return false 31844 } 31845 func rewriteValueARM64_OpARM64XORconst_0(v *Value) bool { 31846 // match: (XORconst [0] x) 31847 
// cond: 31848 // result: x 31849 for { 31850 if v.AuxInt != 0 { 31851 break 31852 } 31853 x := v.Args[0] 31854 v.reset(OpCopy) 31855 v.Type = x.Type 31856 v.AddArg(x) 31857 return true 31858 } 31859 // match: (XORconst [-1] x) 31860 // cond: 31861 // result: (MVN x) 31862 for { 31863 if v.AuxInt != -1 { 31864 break 31865 } 31866 x := v.Args[0] 31867 v.reset(OpARM64MVN) 31868 v.AddArg(x) 31869 return true 31870 } 31871 // match: (XORconst [c] (MOVDconst [d])) 31872 // cond: 31873 // result: (MOVDconst [c^d]) 31874 for { 31875 c := v.AuxInt 31876 v_0 := v.Args[0] 31877 if v_0.Op != OpARM64MOVDconst { 31878 break 31879 } 31880 d := v_0.AuxInt 31881 v.reset(OpARM64MOVDconst) 31882 v.AuxInt = c ^ d 31883 return true 31884 } 31885 // match: (XORconst [c] (XORconst [d] x)) 31886 // cond: 31887 // result: (XORconst [c^d] x) 31888 for { 31889 c := v.AuxInt 31890 v_0 := v.Args[0] 31891 if v_0.Op != OpARM64XORconst { 31892 break 31893 } 31894 d := v_0.AuxInt 31895 x := v_0.Args[0] 31896 v.reset(OpARM64XORconst) 31897 v.AuxInt = c ^ d 31898 v.AddArg(x) 31899 return true 31900 } 31901 return false 31902 } 31903 func rewriteValueARM64_OpARM64XORshiftLL_0(v *Value) bool { 31904 b := v.Block 31905 _ = b 31906 // match: (XORshiftLL (MOVDconst [c]) x [d]) 31907 // cond: 31908 // result: (XORconst [c] (SLLconst <x.Type> x [d])) 31909 for { 31910 d := v.AuxInt 31911 _ = v.Args[1] 31912 v_0 := v.Args[0] 31913 if v_0.Op != OpARM64MOVDconst { 31914 break 31915 } 31916 c := v_0.AuxInt 31917 x := v.Args[1] 31918 v.reset(OpARM64XORconst) 31919 v.AuxInt = c 31920 v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type) 31921 v0.AuxInt = d 31922 v0.AddArg(x) 31923 v.AddArg(v0) 31924 return true 31925 } 31926 // match: (XORshiftLL x (MOVDconst [c]) [d]) 31927 // cond: 31928 // result: (XORconst x [int64(uint64(c)<<uint64(d))]) 31929 for { 31930 d := v.AuxInt 31931 _ = v.Args[1] 31932 x := v.Args[0] 31933 v_1 := v.Args[1] 31934 if v_1.Op != OpARM64MOVDconst { 31935 break 31936 } 31937 c := v_1.AuxInt 
31938 v.reset(OpARM64XORconst) 31939 v.AuxInt = int64(uint64(c) << uint64(d)) 31940 v.AddArg(x) 31941 return true 31942 } 31943 // match: (XORshiftLL x (SLLconst x [c]) [d]) 31944 // cond: c==d 31945 // result: (MOVDconst [0]) 31946 for { 31947 d := v.AuxInt 31948 _ = v.Args[1] 31949 x := v.Args[0] 31950 v_1 := v.Args[1] 31951 if v_1.Op != OpARM64SLLconst { 31952 break 31953 } 31954 c := v_1.AuxInt 31955 if x != v_1.Args[0] { 31956 break 31957 } 31958 if !(c == d) { 31959 break 31960 } 31961 v.reset(OpARM64MOVDconst) 31962 v.AuxInt = 0 31963 return true 31964 } 31965 // match: (XORshiftLL [c] (SRLconst x [64-c]) x) 31966 // cond: 31967 // result: (RORconst [64-c] x) 31968 for { 31969 c := v.AuxInt 31970 _ = v.Args[1] 31971 v_0 := v.Args[0] 31972 if v_0.Op != OpARM64SRLconst { 31973 break 31974 } 31975 if v_0.AuxInt != 64-c { 31976 break 31977 } 31978 x := v_0.Args[0] 31979 if x != v.Args[1] { 31980 break 31981 } 31982 v.reset(OpARM64RORconst) 31983 v.AuxInt = 64 - c 31984 v.AddArg(x) 31985 return true 31986 } 31987 // match: (XORshiftLL <t> [c] (UBFX [bfc] x) x) 31988 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 31989 // result: (RORWconst [32-c] x) 31990 for { 31991 t := v.Type 31992 c := v.AuxInt 31993 _ = v.Args[1] 31994 v_0 := v.Args[0] 31995 if v_0.Op != OpARM64UBFX { 31996 break 31997 } 31998 bfc := v_0.AuxInt 31999 x := v_0.Args[0] 32000 if x != v.Args[1] { 32001 break 32002 } 32003 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 32004 break 32005 } 32006 v.reset(OpARM64RORWconst) 32007 v.AuxInt = 32 - c 32008 v.AddArg(x) 32009 return true 32010 } 32011 // match: (XORshiftLL [c] (SRLconst x [64-c]) x2) 32012 // cond: 32013 // result: (EXTRconst [64-c] x2 x) 32014 for { 32015 c := v.AuxInt 32016 _ = v.Args[1] 32017 v_0 := v.Args[0] 32018 if v_0.Op != OpARM64SRLconst { 32019 break 32020 } 32021 if v_0.AuxInt != 64-c { 32022 break 32023 } 32024 x := v_0.Args[0] 32025 x2 := v.Args[1] 32026 v.reset(OpARM64EXTRconst) 32027 
v.AuxInt = 64 - c 32028 v.AddArg(x2) 32029 v.AddArg(x) 32030 return true 32031 } 32032 // match: (XORshiftLL <t> [c] (UBFX [bfc] x) x2) 32033 // cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c) 32034 // result: (EXTRWconst [32-c] x2 x) 32035 for { 32036 t := v.Type 32037 c := v.AuxInt 32038 _ = v.Args[1] 32039 v_0 := v.Args[0] 32040 if v_0.Op != OpARM64UBFX { 32041 break 32042 } 32043 bfc := v_0.AuxInt 32044 x := v_0.Args[0] 32045 x2 := v.Args[1] 32046 if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) { 32047 break 32048 } 32049 v.reset(OpARM64EXTRWconst) 32050 v.AuxInt = 32 - c 32051 v.AddArg(x2) 32052 v.AddArg(x) 32053 return true 32054 } 32055 return false 32056 } 32057 func rewriteValueARM64_OpARM64XORshiftRA_0(v *Value) bool { 32058 b := v.Block 32059 _ = b 32060 // match: (XORshiftRA (MOVDconst [c]) x [d]) 32061 // cond: 32062 // result: (XORconst [c] (SRAconst <x.Type> x [d])) 32063 for { 32064 d := v.AuxInt 32065 _ = v.Args[1] 32066 v_0 := v.Args[0] 32067 if v_0.Op != OpARM64MOVDconst { 32068 break 32069 } 32070 c := v_0.AuxInt 32071 x := v.Args[1] 32072 v.reset(OpARM64XORconst) 32073 v.AuxInt = c 32074 v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type) 32075 v0.AuxInt = d 32076 v0.AddArg(x) 32077 v.AddArg(v0) 32078 return true 32079 } 32080 // match: (XORshiftRA x (MOVDconst [c]) [d]) 32081 // cond: 32082 // result: (XORconst x [c>>uint64(d)]) 32083 for { 32084 d := v.AuxInt 32085 _ = v.Args[1] 32086 x := v.Args[0] 32087 v_1 := v.Args[1] 32088 if v_1.Op != OpARM64MOVDconst { 32089 break 32090 } 32091 c := v_1.AuxInt 32092 v.reset(OpARM64XORconst) 32093 v.AuxInt = c >> uint64(d) 32094 v.AddArg(x) 32095 return true 32096 } 32097 // match: (XORshiftRA x (SRAconst x [c]) [d]) 32098 // cond: c==d 32099 // result: (MOVDconst [0]) 32100 for { 32101 d := v.AuxInt 32102 _ = v.Args[1] 32103 x := v.Args[0] 32104 v_1 := v.Args[1] 32105 if v_1.Op != OpARM64SRAconst { 32106 break 32107 } 32108 c := v_1.AuxInt 32109 if x != v_1.Args[0] { 32110 
break 32111 } 32112 if !(c == d) { 32113 break 32114 } 32115 v.reset(OpARM64MOVDconst) 32116 v.AuxInt = 0 32117 return true 32118 } 32119 return false 32120 } 32121 func rewriteValueARM64_OpARM64XORshiftRL_0(v *Value) bool { 32122 b := v.Block 32123 _ = b 32124 // match: (XORshiftRL (MOVDconst [c]) x [d]) 32125 // cond: 32126 // result: (XORconst [c] (SRLconst <x.Type> x [d])) 32127 for { 32128 d := v.AuxInt 32129 _ = v.Args[1] 32130 v_0 := v.Args[0] 32131 if v_0.Op != OpARM64MOVDconst { 32132 break 32133 } 32134 c := v_0.AuxInt 32135 x := v.Args[1] 32136 v.reset(OpARM64XORconst) 32137 v.AuxInt = c 32138 v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type) 32139 v0.AuxInt = d 32140 v0.AddArg(x) 32141 v.AddArg(v0) 32142 return true 32143 } 32144 // match: (XORshiftRL x (MOVDconst [c]) [d]) 32145 // cond: 32146 // result: (XORconst x [int64(uint64(c)>>uint64(d))]) 32147 for { 32148 d := v.AuxInt 32149 _ = v.Args[1] 32150 x := v.Args[0] 32151 v_1 := v.Args[1] 32152 if v_1.Op != OpARM64MOVDconst { 32153 break 32154 } 32155 c := v_1.AuxInt 32156 v.reset(OpARM64XORconst) 32157 v.AuxInt = int64(uint64(c) >> uint64(d)) 32158 v.AddArg(x) 32159 return true 32160 } 32161 // match: (XORshiftRL x (SRLconst x [c]) [d]) 32162 // cond: c==d 32163 // result: (MOVDconst [0]) 32164 for { 32165 d := v.AuxInt 32166 _ = v.Args[1] 32167 x := v.Args[0] 32168 v_1 := v.Args[1] 32169 if v_1.Op != OpARM64SRLconst { 32170 break 32171 } 32172 c := v_1.AuxInt 32173 if x != v_1.Args[0] { 32174 break 32175 } 32176 if !(c == d) { 32177 break 32178 } 32179 v.reset(OpARM64MOVDconst) 32180 v.AuxInt = 0 32181 return true 32182 } 32183 // match: (XORshiftRL [c] (SLLconst x [64-c]) x) 32184 // cond: 32185 // result: (RORconst [ c] x) 32186 for { 32187 c := v.AuxInt 32188 _ = v.Args[1] 32189 v_0 := v.Args[0] 32190 if v_0.Op != OpARM64SLLconst { 32191 break 32192 } 32193 if v_0.AuxInt != 64-c { 32194 break 32195 } 32196 x := v_0.Args[0] 32197 if x != v.Args[1] { 32198 break 32199 } 32200 
v.reset(OpARM64RORconst) 32201 v.AuxInt = c 32202 v.AddArg(x) 32203 return true 32204 } 32205 // match: (XORshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x)) 32206 // cond: c < 32 && t.Size() == 4 32207 // result: (RORWconst [c] x) 32208 for { 32209 t := v.Type 32210 c := v.AuxInt 32211 _ = v.Args[1] 32212 v_0 := v.Args[0] 32213 if v_0.Op != OpARM64SLLconst { 32214 break 32215 } 32216 if v_0.AuxInt != 32-c { 32217 break 32218 } 32219 x := v_0.Args[0] 32220 v_1 := v.Args[1] 32221 if v_1.Op != OpARM64MOVWUreg { 32222 break 32223 } 32224 if x != v_1.Args[0] { 32225 break 32226 } 32227 if !(c < 32 && t.Size() == 4) { 32228 break 32229 } 32230 v.reset(OpARM64RORWconst) 32231 v.AuxInt = c 32232 v.AddArg(x) 32233 return true 32234 } 32235 return false 32236 } 32237 func rewriteValueARM64_OpAbs_0(v *Value) bool { 32238 // match: (Abs x) 32239 // cond: 32240 // result: (FABSD x) 32241 for { 32242 x := v.Args[0] 32243 v.reset(OpARM64FABSD) 32244 v.AddArg(x) 32245 return true 32246 } 32247 } 32248 func rewriteValueARM64_OpAdd16_0(v *Value) bool { 32249 // match: (Add16 x y) 32250 // cond: 32251 // result: (ADD x y) 32252 for { 32253 _ = v.Args[1] 32254 x := v.Args[0] 32255 y := v.Args[1] 32256 v.reset(OpARM64ADD) 32257 v.AddArg(x) 32258 v.AddArg(y) 32259 return true 32260 } 32261 } 32262 func rewriteValueARM64_OpAdd32_0(v *Value) bool { 32263 // match: (Add32 x y) 32264 // cond: 32265 // result: (ADD x y) 32266 for { 32267 _ = v.Args[1] 32268 x := v.Args[0] 32269 y := v.Args[1] 32270 v.reset(OpARM64ADD) 32271 v.AddArg(x) 32272 v.AddArg(y) 32273 return true 32274 } 32275 } 32276 func rewriteValueARM64_OpAdd32F_0(v *Value) bool { 32277 // match: (Add32F x y) 32278 // cond: 32279 // result: (FADDS x y) 32280 for { 32281 _ = v.Args[1] 32282 x := v.Args[0] 32283 y := v.Args[1] 32284 v.reset(OpARM64FADDS) 32285 v.AddArg(x) 32286 v.AddArg(y) 32287 return true 32288 } 32289 } 32290 func rewriteValueARM64_OpAdd64_0(v *Value) bool { 32291 // match: (Add64 x y) 32292 // cond: 32293 // 
result: (ADD x y) 32294 for { 32295 _ = v.Args[1] 32296 x := v.Args[0] 32297 y := v.Args[1] 32298 v.reset(OpARM64ADD) 32299 v.AddArg(x) 32300 v.AddArg(y) 32301 return true 32302 } 32303 } 32304 func rewriteValueARM64_OpAdd64F_0(v *Value) bool { 32305 // match: (Add64F x y) 32306 // cond: 32307 // result: (FADDD x y) 32308 for { 32309 _ = v.Args[1] 32310 x := v.Args[0] 32311 y := v.Args[1] 32312 v.reset(OpARM64FADDD) 32313 v.AddArg(x) 32314 v.AddArg(y) 32315 return true 32316 } 32317 } 32318 func rewriteValueARM64_OpAdd8_0(v *Value) bool { 32319 // match: (Add8 x y) 32320 // cond: 32321 // result: (ADD x y) 32322 for { 32323 _ = v.Args[1] 32324 x := v.Args[0] 32325 y := v.Args[1] 32326 v.reset(OpARM64ADD) 32327 v.AddArg(x) 32328 v.AddArg(y) 32329 return true 32330 } 32331 } 32332 func rewriteValueARM64_OpAddPtr_0(v *Value) bool { 32333 // match: (AddPtr x y) 32334 // cond: 32335 // result: (ADD x y) 32336 for { 32337 _ = v.Args[1] 32338 x := v.Args[0] 32339 y := v.Args[1] 32340 v.reset(OpARM64ADD) 32341 v.AddArg(x) 32342 v.AddArg(y) 32343 return true 32344 } 32345 } 32346 func rewriteValueARM64_OpAddr_0(v *Value) bool { 32347 // match: (Addr {sym} base) 32348 // cond: 32349 // result: (MOVDaddr {sym} base) 32350 for { 32351 sym := v.Aux 32352 base := v.Args[0] 32353 v.reset(OpARM64MOVDaddr) 32354 v.Aux = sym 32355 v.AddArg(base) 32356 return true 32357 } 32358 } 32359 func rewriteValueARM64_OpAnd16_0(v *Value) bool { 32360 // match: (And16 x y) 32361 // cond: 32362 // result: (AND x y) 32363 for { 32364 _ = v.Args[1] 32365 x := v.Args[0] 32366 y := v.Args[1] 32367 v.reset(OpARM64AND) 32368 v.AddArg(x) 32369 v.AddArg(y) 32370 return true 32371 } 32372 } 32373 func rewriteValueARM64_OpAnd32_0(v *Value) bool { 32374 // match: (And32 x y) 32375 // cond: 32376 // result: (AND x y) 32377 for { 32378 _ = v.Args[1] 32379 x := v.Args[0] 32380 y := v.Args[1] 32381 v.reset(OpARM64AND) 32382 v.AddArg(x) 32383 v.AddArg(y) 32384 return true 32385 } 32386 } 32387 func 
rewriteValueARM64_OpAnd64_0(v *Value) bool { 32388 // match: (And64 x y) 32389 // cond: 32390 // result: (AND x y) 32391 for { 32392 _ = v.Args[1] 32393 x := v.Args[0] 32394 y := v.Args[1] 32395 v.reset(OpARM64AND) 32396 v.AddArg(x) 32397 v.AddArg(y) 32398 return true 32399 } 32400 } 32401 func rewriteValueARM64_OpAnd8_0(v *Value) bool { 32402 // match: (And8 x y) 32403 // cond: 32404 // result: (AND x y) 32405 for { 32406 _ = v.Args[1] 32407 x := v.Args[0] 32408 y := v.Args[1] 32409 v.reset(OpARM64AND) 32410 v.AddArg(x) 32411 v.AddArg(y) 32412 return true 32413 } 32414 } 32415 func rewriteValueARM64_OpAndB_0(v *Value) bool { 32416 // match: (AndB x y) 32417 // cond: 32418 // result: (AND x y) 32419 for { 32420 _ = v.Args[1] 32421 x := v.Args[0] 32422 y := v.Args[1] 32423 v.reset(OpARM64AND) 32424 v.AddArg(x) 32425 v.AddArg(y) 32426 return true 32427 } 32428 } 32429 func rewriteValueARM64_OpAtomicAdd32_0(v *Value) bool { 32430 // match: (AtomicAdd32 ptr val mem) 32431 // cond: 32432 // result: (LoweredAtomicAdd32 ptr val mem) 32433 for { 32434 _ = v.Args[2] 32435 ptr := v.Args[0] 32436 val := v.Args[1] 32437 mem := v.Args[2] 32438 v.reset(OpARM64LoweredAtomicAdd32) 32439 v.AddArg(ptr) 32440 v.AddArg(val) 32441 v.AddArg(mem) 32442 return true 32443 } 32444 } 32445 func rewriteValueARM64_OpAtomicAdd32Variant_0(v *Value) bool { 32446 // match: (AtomicAdd32Variant ptr val mem) 32447 // cond: 32448 // result: (LoweredAtomicAdd32Variant ptr val mem) 32449 for { 32450 _ = v.Args[2] 32451 ptr := v.Args[0] 32452 val := v.Args[1] 32453 mem := v.Args[2] 32454 v.reset(OpARM64LoweredAtomicAdd32Variant) 32455 v.AddArg(ptr) 32456 v.AddArg(val) 32457 v.AddArg(mem) 32458 return true 32459 } 32460 } 32461 func rewriteValueARM64_OpAtomicAdd64_0(v *Value) bool { 32462 // match: (AtomicAdd64 ptr val mem) 32463 // cond: 32464 // result: (LoweredAtomicAdd64 ptr val mem) 32465 for { 32466 _ = v.Args[2] 32467 ptr := v.Args[0] 32468 val := v.Args[1] 32469 mem := v.Args[2] 32470 
v.reset(OpARM64LoweredAtomicAdd64) 32471 v.AddArg(ptr) 32472 v.AddArg(val) 32473 v.AddArg(mem) 32474 return true 32475 } 32476 } 32477 func rewriteValueARM64_OpAtomicAdd64Variant_0(v *Value) bool { 32478 // match: (AtomicAdd64Variant ptr val mem) 32479 // cond: 32480 // result: (LoweredAtomicAdd64Variant ptr val mem) 32481 for { 32482 _ = v.Args[2] 32483 ptr := v.Args[0] 32484 val := v.Args[1] 32485 mem := v.Args[2] 32486 v.reset(OpARM64LoweredAtomicAdd64Variant) 32487 v.AddArg(ptr) 32488 v.AddArg(val) 32489 v.AddArg(mem) 32490 return true 32491 } 32492 } 32493 func rewriteValueARM64_OpAtomicAnd8_0(v *Value) bool { 32494 b := v.Block 32495 _ = b 32496 typ := &b.Func.Config.Types 32497 _ = typ 32498 // match: (AtomicAnd8 ptr val mem) 32499 // cond: 32500 // result: (Select1 (LoweredAtomicAnd8 ptr val mem)) 32501 for { 32502 _ = v.Args[2] 32503 ptr := v.Args[0] 32504 val := v.Args[1] 32505 mem := v.Args[2] 32506 v.reset(OpSelect1) 32507 v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicAnd8, types.NewTuple(typ.UInt8, types.TypeMem)) 32508 v0.AddArg(ptr) 32509 v0.AddArg(val) 32510 v0.AddArg(mem) 32511 v.AddArg(v0) 32512 return true 32513 } 32514 } 32515 func rewriteValueARM64_OpAtomicCompareAndSwap32_0(v *Value) bool { 32516 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 32517 // cond: 32518 // result: (LoweredAtomicCas32 ptr old new_ mem) 32519 for { 32520 _ = v.Args[3] 32521 ptr := v.Args[0] 32522 old := v.Args[1] 32523 new_ := v.Args[2] 32524 mem := v.Args[3] 32525 v.reset(OpARM64LoweredAtomicCas32) 32526 v.AddArg(ptr) 32527 v.AddArg(old) 32528 v.AddArg(new_) 32529 v.AddArg(mem) 32530 return true 32531 } 32532 } 32533 func rewriteValueARM64_OpAtomicCompareAndSwap64_0(v *Value) bool { 32534 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 32535 // cond: 32536 // result: (LoweredAtomicCas64 ptr old new_ mem) 32537 for { 32538 _ = v.Args[3] 32539 ptr := v.Args[0] 32540 old := v.Args[1] 32541 new_ := v.Args[2] 32542 mem := v.Args[3] 32543 
		// NOTE(review): tail of rewriteValueARM64_OpAtomicCas64_0; the function's
		// start (argument extraction of ptr/old/new_/mem) precedes this chunk.
		v.reset(OpARM64LoweredAtomicCas64)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}

// The Atomic* lowerings below replace generic atomic ops with their ARM64
// pseudo-ops (Lowered*) or acquire/release load-store instructions (LDAR*/STLR*).
func rewriteValueARM64_OpAtomicExchange32_0(v *Value) bool {
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpAtomicExchange64_0(v *Value) bool {
	// match: (AtomicExchange64 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64LoweredAtomicExchange64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LDARW ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDARW)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LDAR ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// AtomicLoadPtr uses the same 64-bit LDAR as AtomicLoad64.
func rewriteValueARM64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LDAR ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LDAR)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// AtomicOr8's lowered op returns a (value, memory) tuple; only the memory
// half is needed here, hence the Select1 wrapper.
func rewriteValueARM64_OpAtomicOr8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (AtomicOr8 ptr val mem)
	// cond:
	// result: (Select1 (LoweredAtomicOr8 ptr val mem))
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpARM64LoweredAtomicOr8, types.NewTuple(typ.UInt8, types.TypeMem))
		v0.AddArg(ptr)
		v0.AddArg(val)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (STLRW ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLRW)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (STLR ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueARM64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (STLR ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64STLR)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// Avg64u is computed as y + (x-y)>>1 (per the result pattern below).
func rewriteValueARM64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ADD)
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpARM64SUB, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
// BitLen64(x) = 64 - CLZ(x), per the result pattern below.
func rewriteValueARM64_OpBitLen64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitLen64 x)
	// cond:
	// result: (SUB (MOVDconst [64]) (CLZ <typ.Int> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SUB)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 64
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CLZ, typ.Int)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
// BitRev16/BitRev8 use the 64-bit RBIT and then shift the reversed bits
// down by 48/56 so only the reversed low 16/8 bits remain.
func rewriteValueARM64_OpBitRev16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitRev16 x)
	// cond:
	// result: (SRLconst [48] (RBIT <typ.UInt64> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 48
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpBitRev32_0(v *Value) bool {
	// match: (BitRev32 x)
	// cond:
	// result: (RBITW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBITW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpBitRev64_0(v *Value) bool {
	// match: (BitRev64 x)
	// cond:
	// result: (RBIT x)
	for {
		x := v.Args[0]
		v.reset(OpARM64RBIT)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpBitRev8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitRev8 x)
	// cond:
	// result: (SRLconst [56] (RBIT <typ.UInt64> x))
	for {
		x := v.Args[0]
		v.reset(OpARM64SRLconst)
		v.AuxInt = 56
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpBswap32_0(v *Value) bool {
	// match: (Bswap32 x)
	// cond:
	// result: (REVW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64REVW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpBswap64_0(v *Value) bool {
	// match: (Bswap64 x)
	// cond:
	// result: (REV x)
	for {
		x := v.Args[0]
		v.reset(OpARM64REV)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCeil_0(v *Value) bool {
	// match: (Ceil x)
	// cond:
	// result: (FRINTPD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTPD)
		v.AddArg(x)
		return true
	}
}
// Call lowerings carry the argument width through AuxInt unchanged.
func rewriteValueARM64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// All Com* widths lower to the same full-register MVN.
func rewriteValueARM64_OpCom16_0(v *Value) bool {
	// match: (Com16 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom32_0(v *Value) bool {
	// match: (Com32 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom64_0(v *Value) bool {
	// match: (Com64 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCom8_0(v *Value) bool {
	// match: (Com8 x)
	// cond:
	// result: (MVN x)
	for {
		x := v.Args[0]
		v.reset(OpARM64MVN)
		v.AddArg(x)
		return true
	}
}
// CondSelect reuses an existing flags-producing condition when the selector
// already is one (first rule); otherwise it materializes flags by comparing
// the boolean against zero (second rule).
func rewriteValueARM64_OpCondSelect_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CondSelect x y bool)
	// cond: flagArg(bool) != nil
	// result: (CSEL {bool.Op} x y flagArg(bool))
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		bool := v.Args[2]
		if !(flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.Aux = bool.Op
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(flagArg(bool))
		return true
	}
	// match: (CondSelect x y bool)
	// cond: flagArg(bool) == nil
	// result: (CSEL {OpARM64NotEqual} x y (CMPWconst [0] bool))
	for {
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		bool := v.Args[2]
		if !(flagArg(bool) == nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64NotEqual
		v.AddArg(x)
		v.AddArg(y)
		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
		v0.AuxInt = 0
		v0.AddArg(bool)
		v.AddArg(v0)
		return true
	}
	return false
}
// Constant lowerings: every generic integer/bool constant becomes MOVDconst
// and float constants become FMOVSconst/FMOVDconst, with AuxInt passed through.
func rewriteValueARM64_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (FMOVSconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVSconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst64_0(v *Value) bool {
	// match: (Const64 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (FMOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64FMOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueARM64_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVDconst [b])
	for {
		b := v.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = b
		return true
	}
}
func rewriteValueARM64_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVDconst [0])
	for {
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
}
// Count-trailing-zeros is computed as count-leading-zeros of the
// bit-reversed input (RBIT/RBITW then CLZ/CLZW).
func rewriteValueARM64_OpCtz32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz32 <t> x)
	// cond:
	// result: (CLZW (RBITW <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZW)
		v0 := b.NewValue0(v.Pos, OpARM64RBITW, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// The NonZero variants simply defer to the general Ctz lowering.
func rewriteValueARM64_OpCtz32NonZero_0(v *Value) bool {
	// match: (Ctz32NonZero x)
	// cond:
	// result: (Ctz32 x)
	for {
		x := v.Args[0]
		v.reset(OpCtz32)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCtz64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Ctz64 <t> x)
	// cond:
	// result: (CLZ (RBIT <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64CLZ)
		v0 := b.NewValue0(v.Pos, OpARM64RBIT, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpCtz64NonZero_0(v *Value) bool {
	// match: (Ctz64NonZero x)
	// cond:
	// result: (Ctz64 x)
	for {
		x := v.Args[0]
		v.reset(OpCtz64)
		v.AddArg(x)
		return true
	}
}
// Float<->int conversion lowerings: each generic Cvt op maps one-to-one onto
// an ARM64 conversion instruction (FCVTZ* to-int, SCVTF*/UCVTF* from-int,
// FCVTSD/FCVTDS between float widths).
func rewriteValueARM64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (FCVTZSSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto32U_0(v *Value) bool {
	// match: (Cvt32Fto32U x)
	// cond:
	// result: (FCVTZUSW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUSW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (FCVTZSS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (FCVTSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Fto64U_0(v *Value) bool {
	// match: (Cvt32Fto64U x)
	// cond:
	// result: (FCVTZUS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto32F_0(v *Value) bool {
	// match: (Cvt32Uto32F x)
	// cond:
	// result: (UCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32Uto64F_0(v *Value) bool {
	// match: (Cvt32Uto64F x)
	// cond:
	// result: (UCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (SCVTFWS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (SCVTFWD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (FCVTZSDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (FCVTDS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTDS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto32U_0(v *Value) bool {
	// match: (Cvt64Fto32U x)
	// cond:
	// result: (FCVTZUDW x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (FCVTZSD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZSD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Fto64U_0(v *Value) bool {
	// match: (Cvt64Fto64U x)
	// cond:
	// result: (FCVTZUD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FCVTZUD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Uto32F_0(v *Value) bool {
	// match: (Cvt64Uto32F x)
	// cond:
	// result: (UCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64Uto64F_0(v *Value) bool {
	// match: (Cvt64Uto64F x)
	// cond:
	// result: (UCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64UCVTFD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to32F_0(v *Value) bool {
	// match: (Cvt64to32F x)
	// cond:
	// result: (SCVTFS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFS)
		v.AddArg(x)
		return true
	}
}
func rewriteValueARM64_OpCvt64to64F_0(v *Value) bool {
	// match: (Cvt64to64F x)
	// cond:
	// result: (SCVTFD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64SCVTFD)
		v.AddArg(x)
		return true
	}
}
// Division lowerings: 8/16-bit operands are first sign- or zero-extended to
// 32 bits before using the 32-bit divide (DIVW/UDIVW).
func rewriteValueARM64_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (UDIVW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv32_0(v *Value) bool {
	// match: (Div32 x y)
	// cond:
	// result: (DIVW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (FDIVS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv32u_0(v *Value) bool {
	// match: (Div32u x y)
	// cond:
	// result: (UDIVW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64_0(v *Value) bool {
	// match: (Div64 x y)
	// cond:
	// result: (DIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (FDIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FDIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv64u_0(v *Value) bool {
	// match: (Div64u x y)
	// cond:
	// result: (UDIV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueARM64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (UDIVW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UDIVW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
// Comparison lowerings: a CMP/CMPW/FCMP* produces a flags value which the
// condition pseudo-op (Equal, GreaterThan, ...) consumes; sub-word operands
// are widened to 32 bits first.
func rewriteValueARM64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32 x y)
	// cond:
	// result: (Equal (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (Equal (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64 x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (Equal (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (Equal (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// Boolean equality is computed arithmetically as 1 XOR (x XOR y),
// per the result pattern below.
func rewriteValueARM64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVDconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueARM64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (EqPtr x y)
	// cond:
	// result: (Equal (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64Equal)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpFloor_0(v *Value) bool {
	// match: (Floor x)
	// cond:
	// result: (FRINTMD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTMD)
		v.AddArg(x)
		return true
	}
}
// Geq*/Greater* lowerings follow the comparison pattern: signed variants
// sign-extend sub-word operands, unsigned variants zero-extend them, and
// the flags feed the matching GreaterEqual/GreaterThan (or *U) pseudo-op.
func rewriteValueARM64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32 x y)
	// cond:
	// result: (GreaterEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (GreaterEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32U x y)
	// cond:
	// result: (GreaterEqualU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64 x y)
	// cond:
	// result: (GreaterEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (GreaterEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64U x y)
	// cond:
	// result: (GreaterEqualU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (GreaterEqual (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (GreaterEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterEqualU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// The runtime-introspection ops map directly to their Lowered pseudo-ops.
func rewriteValueARM64_OpGetCallerPC_0(v *Value) bool {
	// match: (GetCallerPC)
	// cond:
	// result: (LoweredGetCallerPC)
	for {
		v.reset(OpARM64LoweredGetCallerPC)
		return true
	}
}
func rewriteValueARM64_OpGetCallerSP_0(v *Value) bool {
	// match: (GetCallerSP)
	// cond:
	// result: (LoweredGetCallerSP)
	for {
		v.reset(OpARM64LoweredGetCallerSP)
		return true
	}
}
func rewriteValueARM64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpARM64LoweredGetClosurePtr)
		return true
	}
}
func rewriteValueARM64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32 x y)
	// cond:
	// result: (GreaterThan (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (GreaterThan (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32U x y)
	// cond:
	// result: (GreaterThanU (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64 x y)
	// cond:
	// result: (GreaterThan (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (GreaterThan (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater64U_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64U x y)
	// cond:
	// result: (GreaterThanU (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (GreaterThan (CMPW (SignExt8to32 x) (SignExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThan)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (GreaterThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64GreaterThanU)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
// Hmul32/Hmul32u take the high 32 bits of the widening 64-bit product
// by shifting the MULL/UMULL result right by 32.
func rewriteValueARM64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAconst (MULL <typ.Int64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64MULL, typ.Int64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRAconst (UMULL <typ.UInt64> x y) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRAconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpARM64UMULL, typ.UInt64)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueARM64_OpHmul64_0(v *Value) bool { 34096 // match: (Hmul64 x y) 34097 // cond: 34098 // result: (MULH x y) 34099 for { 34100 _ = v.Args[1] 34101 x := v.Args[0] 34102 y := v.Args[1] 34103 v.reset(OpARM64MULH) 34104 v.AddArg(x) 34105 v.AddArg(y) 34106 return true 34107 } 34108 } 34109 func rewriteValueARM64_OpHmul64u_0(v *Value) bool { 34110 // match: (Hmul64u x y) 34111 // cond: 34112 // result: (UMULH x y) 34113 for { 34114 _ = v.Args[1] 34115 x := v.Args[0] 34116 y := v.Args[1] 34117 v.reset(OpARM64UMULH) 34118 v.AddArg(x) 34119 v.AddArg(y) 34120 return true 34121 } 34122 } 34123 func rewriteValueARM64_OpInterCall_0(v *Value) bool { 34124 // match: (InterCall [argwid] entry mem) 34125 // cond: 34126 // result: (CALLinter [argwid] entry mem) 34127 for { 34128 argwid := v.AuxInt 34129 _ = v.Args[1] 34130 entry := v.Args[0] 34131 mem := v.Args[1] 34132 v.reset(OpARM64CALLinter) 34133 v.AuxInt = argwid 34134 v.AddArg(entry) 34135 v.AddArg(mem) 34136 return true 34137 } 34138 } 34139 func rewriteValueARM64_OpIsInBounds_0(v *Value) bool { 34140 b := v.Block 34141 _ = b 34142 // match: (IsInBounds idx len) 34143 // cond: 34144 // result: (LessThanU (CMP idx len)) 34145 for { 34146 _ = v.Args[1] 34147 idx := v.Args[0] 34148 len := v.Args[1] 34149 v.reset(OpARM64LessThanU) 34150 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34151 v0.AddArg(idx) 34152 v0.AddArg(len) 34153 v.AddArg(v0) 34154 return true 34155 } 34156 } 34157 func rewriteValueARM64_OpIsNonNil_0(v *Value) bool { 34158 b := v.Block 34159 _ = b 34160 // match: (IsNonNil ptr) 34161 // cond: 34162 // result: (NotEqual (CMPconst [0] ptr)) 34163 for { 34164 ptr := v.Args[0] 34165 v.reset(OpARM64NotEqual) 34166 v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34167 v0.AuxInt = 0 34168 v0.AddArg(ptr) 34169 v.AddArg(v0) 34170 return true 34171 } 34172 } 34173 func rewriteValueARM64_OpIsSliceInBounds_0(v *Value) bool { 34174 b := v.Block 34175 _ = b 34176 // match: (IsSliceInBounds idx 
len) 34177 // cond: 34178 // result: (LessEqualU (CMP idx len)) 34179 for { 34180 _ = v.Args[1] 34181 idx := v.Args[0] 34182 len := v.Args[1] 34183 v.reset(OpARM64LessEqualU) 34184 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34185 v0.AddArg(idx) 34186 v0.AddArg(len) 34187 v.AddArg(v0) 34188 return true 34189 } 34190 } 34191 func rewriteValueARM64_OpLeq16_0(v *Value) bool { 34192 b := v.Block 34193 _ = b 34194 typ := &b.Func.Config.Types 34195 _ = typ 34196 // match: (Leq16 x y) 34197 // cond: 34198 // result: (LessEqual (CMPW (SignExt16to32 x) (SignExt16to32 y))) 34199 for { 34200 _ = v.Args[1] 34201 x := v.Args[0] 34202 y := v.Args[1] 34203 v.reset(OpARM64LessEqual) 34204 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34205 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34206 v1.AddArg(x) 34207 v0.AddArg(v1) 34208 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34209 v2.AddArg(y) 34210 v0.AddArg(v2) 34211 v.AddArg(v0) 34212 return true 34213 } 34214 } 34215 func rewriteValueARM64_OpLeq16U_0(v *Value) bool { 34216 b := v.Block 34217 _ = b 34218 typ := &b.Func.Config.Types 34219 _ = typ 34220 // match: (Leq16U x y) 34221 // cond: 34222 // result: (LessEqualU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 34223 for { 34224 _ = v.Args[1] 34225 x := v.Args[0] 34226 y := v.Args[1] 34227 v.reset(OpARM64LessEqualU) 34228 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34229 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34230 v1.AddArg(x) 34231 v0.AddArg(v1) 34232 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34233 v2.AddArg(y) 34234 v0.AddArg(v2) 34235 v.AddArg(v0) 34236 return true 34237 } 34238 } 34239 func rewriteValueARM64_OpLeq32_0(v *Value) bool { 34240 b := v.Block 34241 _ = b 34242 // match: (Leq32 x y) 34243 // cond: 34244 // result: (LessEqual (CMPW x y)) 34245 for { 34246 _ = v.Args[1] 34247 x := v.Args[0] 34248 y := v.Args[1] 34249 v.reset(OpARM64LessEqual) 34250 v0 := b.NewValue0(v.Pos, OpARM64CMPW, 
types.TypeFlags) 34251 v0.AddArg(x) 34252 v0.AddArg(y) 34253 v.AddArg(v0) 34254 return true 34255 } 34256 } 34257 func rewriteValueARM64_OpLeq32F_0(v *Value) bool { 34258 b := v.Block 34259 _ = b 34260 // match: (Leq32F x y) 34261 // cond: 34262 // result: (GreaterEqual (FCMPS y x)) 34263 for { 34264 _ = v.Args[1] 34265 x := v.Args[0] 34266 y := v.Args[1] 34267 v.reset(OpARM64GreaterEqual) 34268 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 34269 v0.AddArg(y) 34270 v0.AddArg(x) 34271 v.AddArg(v0) 34272 return true 34273 } 34274 } 34275 func rewriteValueARM64_OpLeq32U_0(v *Value) bool { 34276 b := v.Block 34277 _ = b 34278 // match: (Leq32U x y) 34279 // cond: 34280 // result: (LessEqualU (CMPW x y)) 34281 for { 34282 _ = v.Args[1] 34283 x := v.Args[0] 34284 y := v.Args[1] 34285 v.reset(OpARM64LessEqualU) 34286 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34287 v0.AddArg(x) 34288 v0.AddArg(y) 34289 v.AddArg(v0) 34290 return true 34291 } 34292 } 34293 func rewriteValueARM64_OpLeq64_0(v *Value) bool { 34294 b := v.Block 34295 _ = b 34296 // match: (Leq64 x y) 34297 // cond: 34298 // result: (LessEqual (CMP x y)) 34299 for { 34300 _ = v.Args[1] 34301 x := v.Args[0] 34302 y := v.Args[1] 34303 v.reset(OpARM64LessEqual) 34304 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34305 v0.AddArg(x) 34306 v0.AddArg(y) 34307 v.AddArg(v0) 34308 return true 34309 } 34310 } 34311 func rewriteValueARM64_OpLeq64F_0(v *Value) bool { 34312 b := v.Block 34313 _ = b 34314 // match: (Leq64F x y) 34315 // cond: 34316 // result: (GreaterEqual (FCMPD y x)) 34317 for { 34318 _ = v.Args[1] 34319 x := v.Args[0] 34320 y := v.Args[1] 34321 v.reset(OpARM64GreaterEqual) 34322 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 34323 v0.AddArg(y) 34324 v0.AddArg(x) 34325 v.AddArg(v0) 34326 return true 34327 } 34328 } 34329 func rewriteValueARM64_OpLeq64U_0(v *Value) bool { 34330 b := v.Block 34331 _ = b 34332 // match: (Leq64U x y) 34333 // cond: 34334 // result: 
(LessEqualU (CMP x y)) 34335 for { 34336 _ = v.Args[1] 34337 x := v.Args[0] 34338 y := v.Args[1] 34339 v.reset(OpARM64LessEqualU) 34340 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34341 v0.AddArg(x) 34342 v0.AddArg(y) 34343 v.AddArg(v0) 34344 return true 34345 } 34346 } 34347 func rewriteValueARM64_OpLeq8_0(v *Value) bool { 34348 b := v.Block 34349 _ = b 34350 typ := &b.Func.Config.Types 34351 _ = typ 34352 // match: (Leq8 x y) 34353 // cond: 34354 // result: (LessEqual (CMPW (SignExt8to32 x) (SignExt8to32 y))) 34355 for { 34356 _ = v.Args[1] 34357 x := v.Args[0] 34358 y := v.Args[1] 34359 v.reset(OpARM64LessEqual) 34360 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34361 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34362 v1.AddArg(x) 34363 v0.AddArg(v1) 34364 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34365 v2.AddArg(y) 34366 v0.AddArg(v2) 34367 v.AddArg(v0) 34368 return true 34369 } 34370 } 34371 func rewriteValueARM64_OpLeq8U_0(v *Value) bool { 34372 b := v.Block 34373 _ = b 34374 typ := &b.Func.Config.Types 34375 _ = typ 34376 // match: (Leq8U x y) 34377 // cond: 34378 // result: (LessEqualU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 34379 for { 34380 _ = v.Args[1] 34381 x := v.Args[0] 34382 y := v.Args[1] 34383 v.reset(OpARM64LessEqualU) 34384 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34385 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34386 v1.AddArg(x) 34387 v0.AddArg(v1) 34388 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34389 v2.AddArg(y) 34390 v0.AddArg(v2) 34391 v.AddArg(v0) 34392 return true 34393 } 34394 } 34395 func rewriteValueARM64_OpLess16_0(v *Value) bool { 34396 b := v.Block 34397 _ = b 34398 typ := &b.Func.Config.Types 34399 _ = typ 34400 // match: (Less16 x y) 34401 // cond: 34402 // result: (LessThan (CMPW (SignExt16to32 x) (SignExt16to32 y))) 34403 for { 34404 _ = v.Args[1] 34405 x := v.Args[0] 34406 y := v.Args[1] 34407 v.reset(OpARM64LessThan) 34408 v0 := b.NewValue0(v.Pos, 
OpARM64CMPW, types.TypeFlags) 34409 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34410 v1.AddArg(x) 34411 v0.AddArg(v1) 34412 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 34413 v2.AddArg(y) 34414 v0.AddArg(v2) 34415 v.AddArg(v0) 34416 return true 34417 } 34418 } 34419 func rewriteValueARM64_OpLess16U_0(v *Value) bool { 34420 b := v.Block 34421 _ = b 34422 typ := &b.Func.Config.Types 34423 _ = typ 34424 // match: (Less16U x y) 34425 // cond: 34426 // result: (LessThanU (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y))) 34427 for { 34428 _ = v.Args[1] 34429 x := v.Args[0] 34430 y := v.Args[1] 34431 v.reset(OpARM64LessThanU) 34432 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34433 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34434 v1.AddArg(x) 34435 v0.AddArg(v1) 34436 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 34437 v2.AddArg(y) 34438 v0.AddArg(v2) 34439 v.AddArg(v0) 34440 return true 34441 } 34442 } 34443 func rewriteValueARM64_OpLess32_0(v *Value) bool { 34444 b := v.Block 34445 _ = b 34446 // match: (Less32 x y) 34447 // cond: 34448 // result: (LessThan (CMPW x y)) 34449 for { 34450 _ = v.Args[1] 34451 x := v.Args[0] 34452 y := v.Args[1] 34453 v.reset(OpARM64LessThan) 34454 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34455 v0.AddArg(x) 34456 v0.AddArg(y) 34457 v.AddArg(v0) 34458 return true 34459 } 34460 } 34461 func rewriteValueARM64_OpLess32F_0(v *Value) bool { 34462 b := v.Block 34463 _ = b 34464 // match: (Less32F x y) 34465 // cond: 34466 // result: (GreaterThan (FCMPS y x)) 34467 for { 34468 _ = v.Args[1] 34469 x := v.Args[0] 34470 y := v.Args[1] 34471 v.reset(OpARM64GreaterThan) 34472 v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags) 34473 v0.AddArg(y) 34474 v0.AddArg(x) 34475 v.AddArg(v0) 34476 return true 34477 } 34478 } 34479 func rewriteValueARM64_OpLess32U_0(v *Value) bool { 34480 b := v.Block 34481 _ = b 34482 // match: (Less32U x y) 34483 // cond: 34484 // result: (LessThanU (CMPW x y)) 34485 
for { 34486 _ = v.Args[1] 34487 x := v.Args[0] 34488 y := v.Args[1] 34489 v.reset(OpARM64LessThanU) 34490 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34491 v0.AddArg(x) 34492 v0.AddArg(y) 34493 v.AddArg(v0) 34494 return true 34495 } 34496 } 34497 func rewriteValueARM64_OpLess64_0(v *Value) bool { 34498 b := v.Block 34499 _ = b 34500 // match: (Less64 x y) 34501 // cond: 34502 // result: (LessThan (CMP x y)) 34503 for { 34504 _ = v.Args[1] 34505 x := v.Args[0] 34506 y := v.Args[1] 34507 v.reset(OpARM64LessThan) 34508 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34509 v0.AddArg(x) 34510 v0.AddArg(y) 34511 v.AddArg(v0) 34512 return true 34513 } 34514 } 34515 func rewriteValueARM64_OpLess64F_0(v *Value) bool { 34516 b := v.Block 34517 _ = b 34518 // match: (Less64F x y) 34519 // cond: 34520 // result: (GreaterThan (FCMPD y x)) 34521 for { 34522 _ = v.Args[1] 34523 x := v.Args[0] 34524 y := v.Args[1] 34525 v.reset(OpARM64GreaterThan) 34526 v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags) 34527 v0.AddArg(y) 34528 v0.AddArg(x) 34529 v.AddArg(v0) 34530 return true 34531 } 34532 } 34533 func rewriteValueARM64_OpLess64U_0(v *Value) bool { 34534 b := v.Block 34535 _ = b 34536 // match: (Less64U x y) 34537 // cond: 34538 // result: (LessThanU (CMP x y)) 34539 for { 34540 _ = v.Args[1] 34541 x := v.Args[0] 34542 y := v.Args[1] 34543 v.reset(OpARM64LessThanU) 34544 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 34545 v0.AddArg(x) 34546 v0.AddArg(y) 34547 v.AddArg(v0) 34548 return true 34549 } 34550 } 34551 func rewriteValueARM64_OpLess8_0(v *Value) bool { 34552 b := v.Block 34553 _ = b 34554 typ := &b.Func.Config.Types 34555 _ = typ 34556 // match: (Less8 x y) 34557 // cond: 34558 // result: (LessThan (CMPW (SignExt8to32 x) (SignExt8to32 y))) 34559 for { 34560 _ = v.Args[1] 34561 x := v.Args[0] 34562 y := v.Args[1] 34563 v.reset(OpARM64LessThan) 34564 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34565 v1 := b.NewValue0(v.Pos, 
OpSignExt8to32, typ.Int32) 34566 v1.AddArg(x) 34567 v0.AddArg(v1) 34568 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 34569 v2.AddArg(y) 34570 v0.AddArg(v2) 34571 v.AddArg(v0) 34572 return true 34573 } 34574 } 34575 func rewriteValueARM64_OpLess8U_0(v *Value) bool { 34576 b := v.Block 34577 _ = b 34578 typ := &b.Func.Config.Types 34579 _ = typ 34580 // match: (Less8U x y) 34581 // cond: 34582 // result: (LessThanU (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y))) 34583 for { 34584 _ = v.Args[1] 34585 x := v.Args[0] 34586 y := v.Args[1] 34587 v.reset(OpARM64LessThanU) 34588 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 34589 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34590 v1.AddArg(x) 34591 v0.AddArg(v1) 34592 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 34593 v2.AddArg(y) 34594 v0.AddArg(v2) 34595 v.AddArg(v0) 34596 return true 34597 } 34598 } 34599 func rewriteValueARM64_OpLoad_0(v *Value) bool { 34600 // match: (Load <t> ptr mem) 34601 // cond: t.IsBoolean() 34602 // result: (MOVBUload ptr mem) 34603 for { 34604 t := v.Type 34605 _ = v.Args[1] 34606 ptr := v.Args[0] 34607 mem := v.Args[1] 34608 if !(t.IsBoolean()) { 34609 break 34610 } 34611 v.reset(OpARM64MOVBUload) 34612 v.AddArg(ptr) 34613 v.AddArg(mem) 34614 return true 34615 } 34616 // match: (Load <t> ptr mem) 34617 // cond: (is8BitInt(t) && isSigned(t)) 34618 // result: (MOVBload ptr mem) 34619 for { 34620 t := v.Type 34621 _ = v.Args[1] 34622 ptr := v.Args[0] 34623 mem := v.Args[1] 34624 if !(is8BitInt(t) && isSigned(t)) { 34625 break 34626 } 34627 v.reset(OpARM64MOVBload) 34628 v.AddArg(ptr) 34629 v.AddArg(mem) 34630 return true 34631 } 34632 // match: (Load <t> ptr mem) 34633 // cond: (is8BitInt(t) && !isSigned(t)) 34634 // result: (MOVBUload ptr mem) 34635 for { 34636 t := v.Type 34637 _ = v.Args[1] 34638 ptr := v.Args[0] 34639 mem := v.Args[1] 34640 if !(is8BitInt(t) && !isSigned(t)) { 34641 break 34642 } 34643 v.reset(OpARM64MOVBUload) 34644 v.AddArg(ptr) 34645 v.AddArg(mem) 
34646 return true 34647 } 34648 // match: (Load <t> ptr mem) 34649 // cond: (is16BitInt(t) && isSigned(t)) 34650 // result: (MOVHload ptr mem) 34651 for { 34652 t := v.Type 34653 _ = v.Args[1] 34654 ptr := v.Args[0] 34655 mem := v.Args[1] 34656 if !(is16BitInt(t) && isSigned(t)) { 34657 break 34658 } 34659 v.reset(OpARM64MOVHload) 34660 v.AddArg(ptr) 34661 v.AddArg(mem) 34662 return true 34663 } 34664 // match: (Load <t> ptr mem) 34665 // cond: (is16BitInt(t) && !isSigned(t)) 34666 // result: (MOVHUload ptr mem) 34667 for { 34668 t := v.Type 34669 _ = v.Args[1] 34670 ptr := v.Args[0] 34671 mem := v.Args[1] 34672 if !(is16BitInt(t) && !isSigned(t)) { 34673 break 34674 } 34675 v.reset(OpARM64MOVHUload) 34676 v.AddArg(ptr) 34677 v.AddArg(mem) 34678 return true 34679 } 34680 // match: (Load <t> ptr mem) 34681 // cond: (is32BitInt(t) && isSigned(t)) 34682 // result: (MOVWload ptr mem) 34683 for { 34684 t := v.Type 34685 _ = v.Args[1] 34686 ptr := v.Args[0] 34687 mem := v.Args[1] 34688 if !(is32BitInt(t) && isSigned(t)) { 34689 break 34690 } 34691 v.reset(OpARM64MOVWload) 34692 v.AddArg(ptr) 34693 v.AddArg(mem) 34694 return true 34695 } 34696 // match: (Load <t> ptr mem) 34697 // cond: (is32BitInt(t) && !isSigned(t)) 34698 // result: (MOVWUload ptr mem) 34699 for { 34700 t := v.Type 34701 _ = v.Args[1] 34702 ptr := v.Args[0] 34703 mem := v.Args[1] 34704 if !(is32BitInt(t) && !isSigned(t)) { 34705 break 34706 } 34707 v.reset(OpARM64MOVWUload) 34708 v.AddArg(ptr) 34709 v.AddArg(mem) 34710 return true 34711 } 34712 // match: (Load <t> ptr mem) 34713 // cond: (is64BitInt(t) || isPtr(t)) 34714 // result: (MOVDload ptr mem) 34715 for { 34716 t := v.Type 34717 _ = v.Args[1] 34718 ptr := v.Args[0] 34719 mem := v.Args[1] 34720 if !(is64BitInt(t) || isPtr(t)) { 34721 break 34722 } 34723 v.reset(OpARM64MOVDload) 34724 v.AddArg(ptr) 34725 v.AddArg(mem) 34726 return true 34727 } 34728 // match: (Load <t> ptr mem) 34729 // cond: is32BitFloat(t) 34730 // result: (FMOVSload ptr mem) 
34731 for { 34732 t := v.Type 34733 _ = v.Args[1] 34734 ptr := v.Args[0] 34735 mem := v.Args[1] 34736 if !(is32BitFloat(t)) { 34737 break 34738 } 34739 v.reset(OpARM64FMOVSload) 34740 v.AddArg(ptr) 34741 v.AddArg(mem) 34742 return true 34743 } 34744 // match: (Load <t> ptr mem) 34745 // cond: is64BitFloat(t) 34746 // result: (FMOVDload ptr mem) 34747 for { 34748 t := v.Type 34749 _ = v.Args[1] 34750 ptr := v.Args[0] 34751 mem := v.Args[1] 34752 if !(is64BitFloat(t)) { 34753 break 34754 } 34755 v.reset(OpARM64FMOVDload) 34756 v.AddArg(ptr) 34757 v.AddArg(mem) 34758 return true 34759 } 34760 return false 34761 } 34762 func rewriteValueARM64_OpLocalAddr_0(v *Value) bool { 34763 // match: (LocalAddr {sym} base _) 34764 // cond: 34765 // result: (MOVDaddr {sym} base) 34766 for { 34767 sym := v.Aux 34768 _ = v.Args[1] 34769 base := v.Args[0] 34770 v.reset(OpARM64MOVDaddr) 34771 v.Aux = sym 34772 v.AddArg(base) 34773 return true 34774 } 34775 } 34776 func rewriteValueARM64_OpLsh16x16_0(v *Value) bool { 34777 b := v.Block 34778 _ = b 34779 typ := &b.Func.Config.Types 34780 _ = typ 34781 // match: (Lsh16x16 <t> x y) 34782 // cond: 34783 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 34784 for { 34785 t := v.Type 34786 _ = v.Args[1] 34787 x := v.Args[0] 34788 y := v.Args[1] 34789 v.reset(OpARM64CSEL) 34790 v.Aux = OpARM64LessThanU 34791 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34792 v0.AddArg(x) 34793 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34794 v1.AddArg(y) 34795 v0.AddArg(v1) 34796 v.AddArg(v0) 34797 v2 := b.NewValue0(v.Pos, OpConst64, t) 34798 v2.AuxInt = 0 34799 v.AddArg(v2) 34800 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34801 v3.AuxInt = 64 34802 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34803 v4.AddArg(y) 34804 v3.AddArg(v4) 34805 v.AddArg(v3) 34806 return true 34807 } 34808 } 34809 func rewriteValueARM64_OpLsh16x32_0(v *Value) bool { 34810 b := v.Block 
34811 _ = b 34812 typ := &b.Func.Config.Types 34813 _ = typ 34814 // match: (Lsh16x32 <t> x y) 34815 // cond: 34816 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 34817 for { 34818 t := v.Type 34819 _ = v.Args[1] 34820 x := v.Args[0] 34821 y := v.Args[1] 34822 v.reset(OpARM64CSEL) 34823 v.Aux = OpARM64LessThanU 34824 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34825 v0.AddArg(x) 34826 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34827 v1.AddArg(y) 34828 v0.AddArg(v1) 34829 v.AddArg(v0) 34830 v2 := b.NewValue0(v.Pos, OpConst64, t) 34831 v2.AuxInt = 0 34832 v.AddArg(v2) 34833 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34834 v3.AuxInt = 64 34835 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34836 v4.AddArg(y) 34837 v3.AddArg(v4) 34838 v.AddArg(v3) 34839 return true 34840 } 34841 } 34842 func rewriteValueARM64_OpLsh16x64_0(v *Value) bool { 34843 b := v.Block 34844 _ = b 34845 // match: (Lsh16x64 <t> x y) 34846 // cond: 34847 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 34848 for { 34849 t := v.Type 34850 _ = v.Args[1] 34851 x := v.Args[0] 34852 y := v.Args[1] 34853 v.reset(OpARM64CSEL) 34854 v.Aux = OpARM64LessThanU 34855 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34856 v0.AddArg(x) 34857 v0.AddArg(y) 34858 v.AddArg(v0) 34859 v1 := b.NewValue0(v.Pos, OpConst64, t) 34860 v1.AuxInt = 0 34861 v.AddArg(v1) 34862 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34863 v2.AuxInt = 64 34864 v2.AddArg(y) 34865 v.AddArg(v2) 34866 return true 34867 } 34868 } 34869 func rewriteValueARM64_OpLsh16x8_0(v *Value) bool { 34870 b := v.Block 34871 _ = b 34872 typ := &b.Func.Config.Types 34873 _ = typ 34874 // match: (Lsh16x8 <t> x y) 34875 // cond: 34876 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 34877 for { 34878 t := v.Type 34879 _ = v.Args[1] 34880 x := v.Args[0] 
34881 y := v.Args[1] 34882 v.reset(OpARM64CSEL) 34883 v.Aux = OpARM64LessThanU 34884 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34885 v0.AddArg(x) 34886 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 34887 v1.AddArg(y) 34888 v0.AddArg(v1) 34889 v.AddArg(v0) 34890 v2 := b.NewValue0(v.Pos, OpConst64, t) 34891 v2.AuxInt = 0 34892 v.AddArg(v2) 34893 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34894 v3.AuxInt = 64 34895 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 34896 v4.AddArg(y) 34897 v3.AddArg(v4) 34898 v.AddArg(v3) 34899 return true 34900 } 34901 } 34902 func rewriteValueARM64_OpLsh32x16_0(v *Value) bool { 34903 b := v.Block 34904 _ = b 34905 typ := &b.Func.Config.Types 34906 _ = typ 34907 // match: (Lsh32x16 <t> x y) 34908 // cond: 34909 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 34910 for { 34911 t := v.Type 34912 _ = v.Args[1] 34913 x := v.Args[0] 34914 y := v.Args[1] 34915 v.reset(OpARM64CSEL) 34916 v.Aux = OpARM64LessThanU 34917 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34918 v0.AddArg(x) 34919 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34920 v1.AddArg(y) 34921 v0.AddArg(v1) 34922 v.AddArg(v0) 34923 v2 := b.NewValue0(v.Pos, OpConst64, t) 34924 v2.AuxInt = 0 34925 v.AddArg(v2) 34926 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34927 v3.AuxInt = 64 34928 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 34929 v4.AddArg(y) 34930 v3.AddArg(v4) 34931 v.AddArg(v3) 34932 return true 34933 } 34934 } 34935 func rewriteValueARM64_OpLsh32x32_0(v *Value) bool { 34936 b := v.Block 34937 _ = b 34938 typ := &b.Func.Config.Types 34939 _ = typ 34940 // match: (Lsh32x32 <t> x y) 34941 // cond: 34942 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 34943 for { 34944 t := v.Type 34945 _ = v.Args[1] 34946 x := v.Args[0] 34947 y := v.Args[1] 34948 v.reset(OpARM64CSEL) 34949 v.Aux = 
OpARM64LessThanU 34950 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34951 v0.AddArg(x) 34952 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34953 v1.AddArg(y) 34954 v0.AddArg(v1) 34955 v.AddArg(v0) 34956 v2 := b.NewValue0(v.Pos, OpConst64, t) 34957 v2.AuxInt = 0 34958 v.AddArg(v2) 34959 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34960 v3.AuxInt = 64 34961 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 34962 v4.AddArg(y) 34963 v3.AddArg(v4) 34964 v.AddArg(v3) 34965 return true 34966 } 34967 } 34968 func rewriteValueARM64_OpLsh32x64_0(v *Value) bool { 34969 b := v.Block 34970 _ = b 34971 // match: (Lsh32x64 <t> x y) 34972 // cond: 34973 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 34974 for { 34975 t := v.Type 34976 _ = v.Args[1] 34977 x := v.Args[0] 34978 y := v.Args[1] 34979 v.reset(OpARM64CSEL) 34980 v.Aux = OpARM64LessThanU 34981 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 34982 v0.AddArg(x) 34983 v0.AddArg(y) 34984 v.AddArg(v0) 34985 v1 := b.NewValue0(v.Pos, OpConst64, t) 34986 v1.AuxInt = 0 34987 v.AddArg(v1) 34988 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 34989 v2.AuxInt = 64 34990 v2.AddArg(y) 34991 v.AddArg(v2) 34992 return true 34993 } 34994 } 34995 func rewriteValueARM64_OpLsh32x8_0(v *Value) bool { 34996 b := v.Block 34997 _ = b 34998 typ := &b.Func.Config.Types 34999 _ = typ 35000 // match: (Lsh32x8 <t> x y) 35001 // cond: 35002 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 35003 for { 35004 t := v.Type 35005 _ = v.Args[1] 35006 x := v.Args[0] 35007 y := v.Args[1] 35008 v.reset(OpARM64CSEL) 35009 v.Aux = OpARM64LessThanU 35010 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35011 v0.AddArg(x) 35012 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35013 v1.AddArg(y) 35014 v0.AddArg(v1) 35015 v.AddArg(v0) 35016 v2 := b.NewValue0(v.Pos, OpConst64, t) 35017 v2.AuxInt = 0 35018 v.AddArg(v2) 35019 v3 := 
b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35020 v3.AuxInt = 64 35021 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35022 v4.AddArg(y) 35023 v3.AddArg(v4) 35024 v.AddArg(v3) 35025 return true 35026 } 35027 } 35028 func rewriteValueARM64_OpLsh64x16_0(v *Value) bool { 35029 b := v.Block 35030 _ = b 35031 typ := &b.Func.Config.Types 35032 _ = typ 35033 // match: (Lsh64x16 <t> x y) 35034 // cond: 35035 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 35036 for { 35037 t := v.Type 35038 _ = v.Args[1] 35039 x := v.Args[0] 35040 y := v.Args[1] 35041 v.reset(OpARM64CSEL) 35042 v.Aux = OpARM64LessThanU 35043 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35044 v0.AddArg(x) 35045 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35046 v1.AddArg(y) 35047 v0.AddArg(v1) 35048 v.AddArg(v0) 35049 v2 := b.NewValue0(v.Pos, OpConst64, t) 35050 v2.AuxInt = 0 35051 v.AddArg(v2) 35052 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35053 v3.AuxInt = 64 35054 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35055 v4.AddArg(y) 35056 v3.AddArg(v4) 35057 v.AddArg(v3) 35058 return true 35059 } 35060 } 35061 func rewriteValueARM64_OpLsh64x32_0(v *Value) bool { 35062 b := v.Block 35063 _ = b 35064 typ := &b.Func.Config.Types 35065 _ = typ 35066 // match: (Lsh64x32 <t> x y) 35067 // cond: 35068 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 35069 for { 35070 t := v.Type 35071 _ = v.Args[1] 35072 x := v.Args[0] 35073 y := v.Args[1] 35074 v.reset(OpARM64CSEL) 35075 v.Aux = OpARM64LessThanU 35076 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35077 v0.AddArg(x) 35078 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35079 v1.AddArg(y) 35080 v0.AddArg(v1) 35081 v.AddArg(v0) 35082 v2 := b.NewValue0(v.Pos, OpConst64, t) 35083 v2.AuxInt = 0 35084 v.AddArg(v2) 35085 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35086 
v3.AuxInt = 64 35087 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35088 v4.AddArg(y) 35089 v3.AddArg(v4) 35090 v.AddArg(v3) 35091 return true 35092 } 35093 } 35094 func rewriteValueARM64_OpLsh64x64_0(v *Value) bool { 35095 b := v.Block 35096 _ = b 35097 // match: (Lsh64x64 <t> x y) 35098 // cond: 35099 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 35100 for { 35101 t := v.Type 35102 _ = v.Args[1] 35103 x := v.Args[0] 35104 y := v.Args[1] 35105 v.reset(OpARM64CSEL) 35106 v.Aux = OpARM64LessThanU 35107 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35108 v0.AddArg(x) 35109 v0.AddArg(y) 35110 v.AddArg(v0) 35111 v1 := b.NewValue0(v.Pos, OpConst64, t) 35112 v1.AuxInt = 0 35113 v.AddArg(v1) 35114 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35115 v2.AuxInt = 64 35116 v2.AddArg(y) 35117 v.AddArg(v2) 35118 return true 35119 } 35120 } 35121 func rewriteValueARM64_OpLsh64x8_0(v *Value) bool { 35122 b := v.Block 35123 _ = b 35124 typ := &b.Func.Config.Types 35125 _ = typ 35126 // match: (Lsh64x8 <t> x y) 35127 // cond: 35128 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 35129 for { 35130 t := v.Type 35131 _ = v.Args[1] 35132 x := v.Args[0] 35133 y := v.Args[1] 35134 v.reset(OpARM64CSEL) 35135 v.Aux = OpARM64LessThanU 35136 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35137 v0.AddArg(x) 35138 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35139 v1.AddArg(y) 35140 v0.AddArg(v1) 35141 v.AddArg(v0) 35142 v2 := b.NewValue0(v.Pos, OpConst64, t) 35143 v2.AuxInt = 0 35144 v.AddArg(v2) 35145 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35146 v3.AuxInt = 64 35147 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 35148 v4.AddArg(y) 35149 v3.AddArg(v4) 35150 v.AddArg(v3) 35151 return true 35152 } 35153 } 35154 func rewriteValueARM64_OpLsh8x16_0(v *Value) bool { 35155 b := v.Block 35156 _ = b 35157 typ := &b.Func.Config.Types 35158 _ = typ 35159 
// match: (Lsh8x16 <t> x y) 35160 // cond: 35161 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 35162 for { 35163 t := v.Type 35164 _ = v.Args[1] 35165 x := v.Args[0] 35166 y := v.Args[1] 35167 v.reset(OpARM64CSEL) 35168 v.Aux = OpARM64LessThanU 35169 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35170 v0.AddArg(x) 35171 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35172 v1.AddArg(y) 35173 v0.AddArg(v1) 35174 v.AddArg(v0) 35175 v2 := b.NewValue0(v.Pos, OpConst64, t) 35176 v2.AuxInt = 0 35177 v.AddArg(v2) 35178 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35179 v3.AuxInt = 64 35180 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 35181 v4.AddArg(y) 35182 v3.AddArg(v4) 35183 v.AddArg(v3) 35184 return true 35185 } 35186 } 35187 func rewriteValueARM64_OpLsh8x32_0(v *Value) bool { 35188 b := v.Block 35189 _ = b 35190 typ := &b.Func.Config.Types 35191 _ = typ 35192 // match: (Lsh8x32 <t> x y) 35193 // cond: 35194 // result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 35195 for { 35196 t := v.Type 35197 _ = v.Args[1] 35198 x := v.Args[0] 35199 y := v.Args[1] 35200 v.reset(OpARM64CSEL) 35201 v.Aux = OpARM64LessThanU 35202 v0 := b.NewValue0(v.Pos, OpARM64SLL, t) 35203 v0.AddArg(x) 35204 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35205 v1.AddArg(y) 35206 v0.AddArg(v1) 35207 v.AddArg(v0) 35208 v2 := b.NewValue0(v.Pos, OpConst64, t) 35209 v2.AuxInt = 0 35210 v.AddArg(v2) 35211 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 35212 v3.AuxInt = 64 35213 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 35214 v4.AddArg(y) 35215 v3.AddArg(v4) 35216 v.AddArg(v3) 35217 return true 35218 } 35219 } 35220 func rewriteValueARM64_OpLsh8x64_0(v *Value) bool { 35221 b := v.Block 35222 _ = b 35223 // match: (Lsh8x64 <t> x y) 35224 // cond: 35225 // result: (CSEL {OpARM64LessThanU} (SLL <t> x y) (Const64 <t> 
// [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v2.AuxInt = 64
		v2.AddArg(y)
		v.AddArg(v2)
		return true
	}
}

// NOTE(review): this region is generated from gen/ARM64.rules (see file
// header: "DO NOT EDIT"). Behavioral changes belong in the rules file, not
// here. Each function lowers one generic SSA op to ARM64 ops, exactly as
// stated in its // match / // cond / // result comments.

// Lower Lsh8x8: conditional left shift that selects 0 when the
// zero-extended shift amount is >= 64 (Go shift semantics).
func rewriteValueARM64_OpLsh8x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Lsh8x8 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SLL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SLL, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v.AddArg(v3)
		return true
	}
}

// Lower Mod16: sign-extend both 16-bit operands and use the 32-bit MODW.
func rewriteValueARM64_OpMod16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16 x y)
	// cond:
	// result: (MODW (SignExt16to32 x) (SignExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// Lower Mod16u: zero-extend both 16-bit operands and use the unsigned UMODW.
func rewriteValueARM64_OpMod16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod16u x y)
	// cond:
	// result: (UMODW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// Lower Mod32 directly to the signed 32-bit MODW.
func rewriteValueARM64_OpMod32_0(v *Value) bool {
	// match: (Mod32 x y)
	// cond:
	// result: (MODW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mod32u directly to the unsigned 32-bit UMODW.
func rewriteValueARM64_OpMod32u_0(v *Value) bool {
	// match: (Mod32u x y)
	// cond:
	// result: (UMODW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mod64 directly to the signed 64-bit MOD.
func rewriteValueARM64_OpMod64_0(v *Value) bool {
	// match: (Mod64 x y)
	// cond:
	// result: (MOD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mod64u directly to the unsigned 64-bit UMOD.
func rewriteValueARM64_OpMod64u_0(v *Value) bool {
	// match: (Mod64u x y)
	// cond:
	// result: (UMOD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMOD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mod8: sign-extend both 8-bit operands and use the 32-bit MODW.
func rewriteValueARM64_OpMod8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8 x y)
	// cond:
	// result: (MODW (SignExt8to32 x) (SignExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MODW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// Lower Mod8u: zero-extend both 8-bit operands and use the unsigned UMODW.
func rewriteValueARM64_OpMod8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Mod8u x y)
	// cond:
	// result: (UMODW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64UMODW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// Lower small fixed-size Move (memmove) ops: sizes 0-8 become a single
// load/store pair of the matching width; odd sizes 3, 5, 6, 7 and 12 become
// two chained store ops (widest store first at the tail offset).
func rewriteValueARM64_OpMove_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [0] _ _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[2]
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// cond:
	// result: (MOVBstore dst (MOVBUload src mem) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// cond:
	// result: (MOVHstore dst (MOVHUload src mem) mem)
	for {
		if v.AuxInt != 2 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [4] dst src mem)
	// cond:
	// result: (MOVWstore dst (MOVWUload src mem) mem)
	for {
		if v.AuxInt != 4 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [8] dst src mem)
	// cond:
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// match: (Move [3] dst src mem)
	// cond:
	// result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem))
	for {
		if v.AuxInt != 3 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 2
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 2
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [5] dst src mem)
	// cond:
	// result: (MOVBstore [4] dst (MOVBUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 5 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [6] dst src mem)
	// cond:
	// result: (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem))
	for {
		if v.AuxInt != 6 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVHstore)
		v.AuxInt = 4
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v0.AuxInt = 4
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [7] dst src mem)
	// cond:
	// result: (MOVBstore [6] dst (MOVBUload [6] src mem) (MOVHstore [4] dst (MOVHUload [4] src mem) (MOVWstore dst (MOVWUload src mem) mem)))
	for {
		if v.AuxInt != 7 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = 6
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVBUload, typ.UInt8)
		v0.AuxInt = 6
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem)
		v1.AuxInt = 4
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVHUload, typ.UInt16)
		v2.AuxInt = 4
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [12] dst src mem)
	// cond:
	// result: (MOVWstore [8] dst (MOVWUload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 12 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVWstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVWUload, typ.UInt32)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	return false
}

// Lower larger Move ops: 16/24 bytes as chained 8-byte stores; otherwise
// peel the unaligned tail (s%8), use Duff's device for medium aligned sizes
// (unless config.noDuffDevice), and fall back to the LoweredMove loop.
func rewriteValueARM64_OpMove_10(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Move [16] dst src mem)
	// cond:
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if v.AuxInt != 16 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [24] dst src mem)
	// cond:
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if v.AuxInt != 24 {
			break
		}
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = 16
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = 16
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v1.AuxInt = 8
		v1.AddArg(dst)
		v2 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v2.AuxInt = 8
		v2.AddArg(src)
		v2.AddArg(mem)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem)
		v3.AddArg(dst)
		v4 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v4.AddArg(src)
		v4.AddArg(mem)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s%8 != 0 && s > 8
	// result: (Move [s%8] (OffPtr <dst.Type> dst [s-s%8]) (OffPtr <src.Type> src [s-s%8]) (Move [s-s%8] dst src mem))
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s%8 != 0 && s > 8) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = s % 8
		v0 := b.NewValue0(v.Pos, OpOffPtr, dst.Type)
		v0.AuxInt = s - s%8
		v0.AddArg(dst)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpOffPtr, src.Type)
		v1.AuxInt = s - s%8
		v1.AddArg(src)
		v.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpMove, types.TypeMem)
		v2.AuxInt = s - s%8
		v2.AddArg(dst)
		v2.AddArg(src)
		v2.AddArg(mem)
		v.AddArg(v2)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice
	// result: (MOVDstore [s-8] dst (MOVDload [s-8] src mem) (DUFFCOPY <types.TypeMem> [8*(64-(s-8)/16)] dst src mem))
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 32 && s <= 16*64 && s%16 == 8 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = s - 8
		v.AddArg(dst)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDload, typ.UInt64)
		v0.AuxInt = s - 8
		v0.AddArg(src)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64DUFFCOPY, types.TypeMem)
		v1.AuxInt = 8 * (64 - (s-8)/16)
		v1.AddArg(dst)
		v1.AddArg(src)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice
	// result: (DUFFCOPY [8 * (64 - s/16)] dst src mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 32 && s <= 16*64 && s%16 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpARM64DUFFCOPY)
		v.AuxInt = 8 * (64 - s/16)
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Move [s] dst src mem)
	// cond: s > 24 && s%8 == 0
	// result: (LoweredMove dst src (ADDconst <src.Type> src [s-8]) mem)
	for {
		s := v.AuxInt
		_ = v.Args[2]
		dst := v.Args[0]
		src := v.Args[1]
		mem := v.Args[2]
		if !(s > 24 && s%8 == 0) {
			break
		}
		v.reset(OpARM64LoweredMove)
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Pos, OpARM64ADDconst, src.Type)
		v0.AuxInt = s - 8
		v0.AddArg(src)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// Lower Mul16 to the 32-bit multiply MULW.
func rewriteValueARM64_OpMul16_0(v *Value) bool {
	// match: (Mul16 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul32 to the 32-bit multiply MULW.
func rewriteValueARM64_OpMul32_0(v *Value) bool {
	// match: (Mul32 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul32F to the single-precision float multiply FMULS.
func rewriteValueARM64_OpMul32F_0(v *Value) bool {
	// match: (Mul32F x y)
	// cond:
	// result: (FMULS x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULS)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul64 to the 64-bit multiply MUL.
func rewriteValueARM64_OpMul64_0(v *Value) bool {
	// match: (Mul64 x y)
	// cond:
	// result: (MUL x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MUL)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul64F to the double-precision float multiply FMULD.
func rewriteValueARM64_OpMul64F_0(v *Value) bool {
	// match: (Mul64F x y)
	// cond:
	// result: (FMULD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64FMULD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul64uhilo (full 128-bit unsigned product) to LoweredMuluhilo.
func rewriteValueARM64_OpMul64uhilo_0(v *Value) bool {
	// match: (Mul64uhilo x y)
	// cond:
	// result: (LoweredMuluhilo x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64LoweredMuluhilo)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Mul8 to the 32-bit multiply MULW.
func rewriteValueARM64_OpMul8_0(v *Value) bool {
	// match: (Mul8 x y)
	// cond:
	// result: (MULW x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64MULW)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Neg16 to NEG.
func rewriteValueARM64_OpNeg16_0(v *Value) bool {
	// match: (Neg16 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// Lower Neg32 to NEG.
func rewriteValueARM64_OpNeg32_0(v *Value) bool {
	// match: (Neg32 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// Lower Neg32F to the float negate FNEGS.
func rewriteValueARM64_OpNeg32F_0(v *Value) bool {
	// match: (Neg32F x)
	// cond:
	// result: (FNEGS x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGS)
		v.AddArg(x)
		return true
	}
}

// Lower Neg64 to NEG.
func rewriteValueARM64_OpNeg64_0(v *Value) bool {
	// match: (Neg64 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// Lower Neg64F to the double negate FNEGD.
func rewriteValueARM64_OpNeg64F_0(v *Value) bool {
	// match: (Neg64F x)
	// cond:
	// result: (FNEGD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FNEGD)
		v.AddArg(x)
		return true
	}
}

// Lower Neg8 to NEG.
func rewriteValueARM64_OpNeg8_0(v *Value) bool {
	// match: (Neg8 x)
	// cond:
	// result: (NEG x)
	for {
		x := v.Args[0]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
}

// Lower Neq16: compare the zero-extended operands with CMPW, read NotEqual.
func rewriteValueARM64_OpNeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq16 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// Lower Neq32 to NotEqual over a 32-bit compare.
func rewriteValueARM64_OpNeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32 x y)
	// cond:
	// result: (NotEqual (CMPW x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower Neq32F to NotEqual over a single-precision float compare.
func rewriteValueARM64_OpNeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq32F x y)
	// cond:
	// result: (NotEqual (FCMPS x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPS, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower Neq64 to NotEqual over a 64-bit compare.
func rewriteValueARM64_OpNeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64 x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower Neq64F to NotEqual over a double-precision float compare.
func rewriteValueARM64_OpNeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Neq64F x y)
	// cond:
	// result: (NotEqual (FCMPD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64FCMPD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower Neq8: compare the zero-extended operands with CMPW, read NotEqual.
func rewriteValueARM64_OpNeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Neq8 x y)
	// cond:
	// result: (NotEqual (CMPW (ZeroExt8to32 x) (ZeroExt8to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}

// Lower NeqB (boolean inequality) to XOR.
func rewriteValueARM64_OpNeqB_0(v *Value) bool {
	// match: (NeqB x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower NeqPtr to NotEqual over a 64-bit compare.
func rewriteValueARM64_OpNeqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (NeqPtr x y)
	// cond:
	// result: (NotEqual (CMP x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64NotEqual)
		v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower NilCheck to the machine-level LoweredNilCheck.
func rewriteValueARM64_OpNilCheck_0(v *Value) bool {
	// match: (NilCheck ptr mem)
	// cond:
	// result: (LoweredNilCheck ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpARM64LoweredNilCheck)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}

// Lower boolean Not as XOR with constant 1.
func rewriteValueARM64_OpNot_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Not x)
	// cond:
	// result: (XOR (MOVDconst [1]) x)
	for {
		x := v.Args[0]
		v.reset(OpARM64XOR)
		v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}

// Lower OffPtr: stack-pointer offsets become MOVDaddr, others ADDconst.
func rewriteValueARM64_OpOffPtr_0(v *Value) bool {
	// match: (OffPtr [off] ptr:(SP))
	// cond:
	// result: (MOVDaddr [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		if ptr.Op != OpSP {
			break
		}
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond:
	// result: (ADDconst [off] ptr)
	for {
		off := v.AuxInt
		ptr := v.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = off
		v.AddArg(ptr)
		return true
	}
}

// Lower Or16 to OR.
func rewriteValueARM64_OpOr16_0(v *Value) bool {
	// match: (Or16 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Or32 to OR.
func rewriteValueARM64_OpOr32_0(v *Value) bool {
	// match: (Or32 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Or64 to OR.
func rewriteValueARM64_OpOr64_0(v *Value) bool {
	// match: (Or64 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower Or8 to OR.
func rewriteValueARM64_OpOr8_0(v *Value) bool {
	// match: (Or8 x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower OrB (boolean or) to OR.
func rewriteValueARM64_OpOrB_0(v *Value) bool {
	// match: (OrB x y)
	// cond:
	// result: (OR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64OR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// Lower PopCount16 via the vector population-count path:
// move the zero-extended value to an FP register, VCNT, then VUADDLV.
func rewriteValueARM64_OpPopCount16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount16 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt16to64 x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// Lower PopCount32 via the vector population-count path (see PopCount16).
func rewriteValueARM64_OpPopCount32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount32 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> (ZeroExt32to64 x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// Lower PopCount64 via the vector population-count path, no extension needed.
func rewriteValueARM64_OpPopCount64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (PopCount64 <t> x)
	// cond:
	// result: (FMOVDfpgp <t> (VUADDLV <typ.Float64> (VCNT <typ.Float64> (FMOVDgpfp <typ.Float64> x))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpARM64FMOVDfpgp)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpARM64VUADDLV, typ.Float64)
		v1 := b.NewValue0(v.Pos, OpARM64VCNT, typ.Float64)
		v2 := b.NewValue0(v.Pos, OpARM64FMOVDgpfp, typ.Float64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// Lower RotateLeft32 as a rotate-right by the negated amount (RORW).
func rewriteValueARM64_OpRotateLeft32_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (RotateLeft32 x y)
	// cond:
	// result: (RORW x (NEG <y.Type> y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64RORW)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower RotateLeft64 as a rotate-right by the negated amount (ROR).
func rewriteValueARM64_OpRotateLeft64_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (RotateLeft64 x y)
	// cond:
	// result: (ROR x (NEG <y.Type> y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, y.Type)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}

// Lower Round to the round-to-nearest-away instruction FRINTAD.
func rewriteValueARM64_OpRound_0(v *Value) bool {
	// match: (Round x)
	// cond:
	// result: (FRINTAD x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTAD)
		v.AddArg(x)
		return true
	}
}

// Lower Round32F to LoweredRound32F.
func rewriteValueARM64_OpRound32F_0(v *Value) bool {
	// match: (Round32F x)
	// cond:
	// result: (LoweredRound32F x)
	for {
		x := v.Args[0]
		v.reset(OpARM64LoweredRound32F)
		v.AddArg(x)
		return true
	}
}

// Lower Round64F to LoweredRound64F.
func rewriteValueARM64_OpRound64F_0(v *Value) bool {
	// match: (Round64F x)
	// cond:
	// result: (LoweredRound64F x)
	for {
		x := v.Args[0]
		v.reset(OpARM64LoweredRound64F)
		v.AddArg(x)
		return true
	}
}

// Lower RoundToEven to the round-to-nearest-even instruction FRINTND.
func rewriteValueARM64_OpRoundToEven_0(v *Value) bool {
	// match: (RoundToEven x)
	// cond:
	// result: (FRINTND x)
	for {
		x := v.Args[0]
		v.reset(OpARM64FRINTND)
		v.AddArg(x)
		return true
	}
}

// Lower Rsh16Ux16: unsigned right shift selecting 0 for amounts >= 64.
func rewriteValueARM64_OpRsh16Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux16 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}

// Lower Rsh16Ux32: unsigned right shift selecting 0 for amounts >= 64.
func rewriteValueARM64_OpRsh16Ux32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux32 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}

// Lower Rsh16Ux64: unsigned right shift with a 64-bit amount, no extension.
func rewriteValueARM64_OpRsh16Ux64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux64 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) y) (Const64 <t> [0]) (CMPconst [64] y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(y)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = 0
		v.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v.AddArg(v3)
		return true
	}
}

// Lower Rsh16Ux8: unsigned right shift selecting 0 for amounts >= 64.
func rewriteValueARM64_OpRsh16Ux8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16Ux8 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt16to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}

// Lower Rsh16x16: arithmetic right shift; amounts >= 64 are clamped to 63
// via CSEL so the sign bit fills the result.
func rewriteValueARM64_OpRsh16x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x16 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// Lower Rsh16x32: arithmetic right shift with the amount clamped to 63.
func rewriteValueARM64_OpRsh16x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x32 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// Lower Rsh16x64: arithmetic right shift with a 64-bit amount, clamped to 63.
func rewriteValueARM64_OpRsh16x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x64 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v1.AddArg(y)
		v2 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v2.AuxInt = 63
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v3.AuxInt = 64
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}

// Lower Rsh16x8: arithmetic right shift with the amount clamped to 63.
func rewriteValueARM64_OpRsh16x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh16x8 x y)
	// cond:
	// result: (SRA (SignExt16to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y))))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type)
		v1.Aux = OpARM64LessThanU
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpConst64, y.Type)
		v3.AuxInt = 63
		v1.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v1.AddArg(v4)
		v.AddArg(v1)
		return true
	}
}

// Lower Rsh32Ux16: unsigned right shift selecting 0 for amounts >= 64.
func rewriteValueARM64_OpRsh32Ux16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32Ux16 <t> x y)
	// cond:
	// result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpARM64CSEL)
		v.Aux = OpARM64LessThanU
		v0 := b.NewValue0(v.Pos, OpARM64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, t)
		v3.AuxInt = 0
		v.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v4.AuxInt = 64
		v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v5.AddArg(y)
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
}
func rewriteValueARM64_OpRsh32Ux32_0(v *Value) bool {
b := v.Block 36757 _ = b 36758 typ := &b.Func.Config.Types 36759 _ = typ 36760 // match: (Rsh32Ux32 <t> x y) 36761 // cond: 36762 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 36763 for { 36764 t := v.Type 36765 _ = v.Args[1] 36766 x := v.Args[0] 36767 y := v.Args[1] 36768 v.reset(OpARM64CSEL) 36769 v.Aux = OpARM64LessThanU 36770 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36771 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36772 v1.AddArg(x) 36773 v0.AddArg(v1) 36774 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36775 v2.AddArg(y) 36776 v0.AddArg(v2) 36777 v.AddArg(v0) 36778 v3 := b.NewValue0(v.Pos, OpConst64, t) 36779 v3.AuxInt = 0 36780 v.AddArg(v3) 36781 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36782 v4.AuxInt = 64 36783 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36784 v5.AddArg(y) 36785 v4.AddArg(v5) 36786 v.AddArg(v4) 36787 return true 36788 } 36789 } 36790 func rewriteValueARM64_OpRsh32Ux64_0(v *Value) bool { 36791 b := v.Block 36792 _ = b 36793 typ := &b.Func.Config.Types 36794 _ = typ 36795 // match: (Rsh32Ux64 <t> x y) 36796 // cond: 36797 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 36798 for { 36799 t := v.Type 36800 _ = v.Args[1] 36801 x := v.Args[0] 36802 y := v.Args[1] 36803 v.reset(OpARM64CSEL) 36804 v.Aux = OpARM64LessThanU 36805 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36806 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36807 v1.AddArg(x) 36808 v0.AddArg(v1) 36809 v0.AddArg(y) 36810 v.AddArg(v0) 36811 v2 := b.NewValue0(v.Pos, OpConst64, t) 36812 v2.AuxInt = 0 36813 v.AddArg(v2) 36814 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36815 v3.AuxInt = 64 36816 v3.AddArg(y) 36817 v.AddArg(v3) 36818 return true 36819 } 36820 } 36821 func rewriteValueARM64_OpRsh32Ux8_0(v *Value) bool { 36822 b := v.Block 36823 _ = b 36824 typ := &b.Func.Config.Types 
36825 _ = typ 36826 // match: (Rsh32Ux8 <t> x y) 36827 // cond: 36828 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt32to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 36829 for { 36830 t := v.Type 36831 _ = v.Args[1] 36832 x := v.Args[0] 36833 y := v.Args[1] 36834 v.reset(OpARM64CSEL) 36835 v.Aux = OpARM64LessThanU 36836 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 36837 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36838 v1.AddArg(x) 36839 v0.AddArg(v1) 36840 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36841 v2.AddArg(y) 36842 v0.AddArg(v2) 36843 v.AddArg(v0) 36844 v3 := b.NewValue0(v.Pos, OpConst64, t) 36845 v3.AuxInt = 0 36846 v.AddArg(v3) 36847 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36848 v4.AuxInt = 64 36849 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36850 v5.AddArg(y) 36851 v4.AddArg(v5) 36852 v.AddArg(v4) 36853 return true 36854 } 36855 } 36856 func rewriteValueARM64_OpRsh32x16_0(v *Value) bool { 36857 b := v.Block 36858 _ = b 36859 typ := &b.Func.Config.Types 36860 _ = typ 36861 // match: (Rsh32x16 x y) 36862 // cond: 36863 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 36864 for { 36865 _ = v.Args[1] 36866 x := v.Args[0] 36867 y := v.Args[1] 36868 v.reset(OpARM64SRA) 36869 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36870 v0.AddArg(x) 36871 v.AddArg(v0) 36872 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36873 v1.Aux = OpARM64LessThanU 36874 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36875 v2.AddArg(y) 36876 v1.AddArg(v2) 36877 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36878 v3.AuxInt = 63 36879 v1.AddArg(v3) 36880 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36881 v4.AuxInt = 64 36882 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 36883 v5.AddArg(y) 36884 v4.AddArg(v5) 36885 v1.AddArg(v4) 36886 v.AddArg(v1) 36887 return true 36888 } 36889 
} 36890 func rewriteValueARM64_OpRsh32x32_0(v *Value) bool { 36891 b := v.Block 36892 _ = b 36893 typ := &b.Func.Config.Types 36894 _ = typ 36895 // match: (Rsh32x32 x y) 36896 // cond: 36897 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 36898 for { 36899 _ = v.Args[1] 36900 x := v.Args[0] 36901 y := v.Args[1] 36902 v.reset(OpARM64SRA) 36903 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36904 v0.AddArg(x) 36905 v.AddArg(v0) 36906 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36907 v1.Aux = OpARM64LessThanU 36908 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36909 v2.AddArg(y) 36910 v1.AddArg(v2) 36911 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36912 v3.AuxInt = 63 36913 v1.AddArg(v3) 36914 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36915 v4.AuxInt = 64 36916 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 36917 v5.AddArg(y) 36918 v4.AddArg(v5) 36919 v1.AddArg(v4) 36920 v.AddArg(v1) 36921 return true 36922 } 36923 } 36924 func rewriteValueARM64_OpRsh32x64_0(v *Value) bool { 36925 b := v.Block 36926 _ = b 36927 typ := &b.Func.Config.Types 36928 _ = typ 36929 // match: (Rsh32x64 x y) 36930 // cond: 36931 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 36932 for { 36933 _ = v.Args[1] 36934 x := v.Args[0] 36935 y := v.Args[1] 36936 v.reset(OpARM64SRA) 36937 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36938 v0.AddArg(x) 36939 v.AddArg(v0) 36940 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36941 v1.Aux = OpARM64LessThanU 36942 v1.AddArg(y) 36943 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 36944 v2.AuxInt = 63 36945 v1.AddArg(v2) 36946 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36947 v3.AuxInt = 64 36948 v3.AddArg(y) 36949 v1.AddArg(v3) 36950 v.AddArg(v1) 36951 return true 36952 } 36953 } 36954 func rewriteValueARM64_OpRsh32x8_0(v *Value) bool { 
36955 b := v.Block 36956 _ = b 36957 typ := &b.Func.Config.Types 36958 _ = typ 36959 // match: (Rsh32x8 x y) 36960 // cond: 36961 // result: (SRA (SignExt32to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 36962 for { 36963 _ = v.Args[1] 36964 x := v.Args[0] 36965 y := v.Args[1] 36966 v.reset(OpARM64SRA) 36967 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 36968 v0.AddArg(x) 36969 v.AddArg(v0) 36970 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 36971 v1.Aux = OpARM64LessThanU 36972 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36973 v2.AddArg(y) 36974 v1.AddArg(v2) 36975 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 36976 v3.AuxInt = 63 36977 v1.AddArg(v3) 36978 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 36979 v4.AuxInt = 64 36980 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 36981 v5.AddArg(y) 36982 v4.AddArg(v5) 36983 v1.AddArg(v4) 36984 v.AddArg(v1) 36985 return true 36986 } 36987 } 36988 func rewriteValueARM64_OpRsh64Ux16_0(v *Value) bool { 36989 b := v.Block 36990 _ = b 36991 typ := &b.Func.Config.Types 36992 _ = typ 36993 // match: (Rsh64Ux16 <t> x y) 36994 // cond: 36995 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 36996 for { 36997 t := v.Type 36998 _ = v.Args[1] 36999 x := v.Args[0] 37000 y := v.Args[1] 37001 v.reset(OpARM64CSEL) 37002 v.Aux = OpARM64LessThanU 37003 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37004 v0.AddArg(x) 37005 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37006 v1.AddArg(y) 37007 v0.AddArg(v1) 37008 v.AddArg(v0) 37009 v2 := b.NewValue0(v.Pos, OpConst64, t) 37010 v2.AuxInt = 0 37011 v.AddArg(v2) 37012 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37013 v3.AuxInt = 64 37014 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37015 v4.AddArg(y) 37016 v3.AddArg(v4) 37017 v.AddArg(v3) 37018 return true 37019 } 37020 } 37021 func 
rewriteValueARM64_OpRsh64Ux32_0(v *Value) bool { 37022 b := v.Block 37023 _ = b 37024 typ := &b.Func.Config.Types 37025 _ = typ 37026 // match: (Rsh64Ux32 <t> x y) 37027 // cond: 37028 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 37029 for { 37030 t := v.Type 37031 _ = v.Args[1] 37032 x := v.Args[0] 37033 y := v.Args[1] 37034 v.reset(OpARM64CSEL) 37035 v.Aux = OpARM64LessThanU 37036 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37037 v0.AddArg(x) 37038 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37039 v1.AddArg(y) 37040 v0.AddArg(v1) 37041 v.AddArg(v0) 37042 v2 := b.NewValue0(v.Pos, OpConst64, t) 37043 v2.AuxInt = 0 37044 v.AddArg(v2) 37045 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37046 v3.AuxInt = 64 37047 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37048 v4.AddArg(y) 37049 v3.AddArg(v4) 37050 v.AddArg(v3) 37051 return true 37052 } 37053 } 37054 func rewriteValueARM64_OpRsh64Ux64_0(v *Value) bool { 37055 b := v.Block 37056 _ = b 37057 // match: (Rsh64Ux64 <t> x y) 37058 // cond: 37059 // result: (CSEL {OpARM64LessThanU} (SRL <t> x y) (Const64 <t> [0]) (CMPconst [64] y)) 37060 for { 37061 t := v.Type 37062 _ = v.Args[1] 37063 x := v.Args[0] 37064 y := v.Args[1] 37065 v.reset(OpARM64CSEL) 37066 v.Aux = OpARM64LessThanU 37067 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37068 v0.AddArg(x) 37069 v0.AddArg(y) 37070 v.AddArg(v0) 37071 v1 := b.NewValue0(v.Pos, OpConst64, t) 37072 v1.AuxInt = 0 37073 v.AddArg(v1) 37074 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37075 v2.AuxInt = 64 37076 v2.AddArg(y) 37077 v.AddArg(v2) 37078 return true 37079 } 37080 } 37081 func rewriteValueARM64_OpRsh64Ux8_0(v *Value) bool { 37082 b := v.Block 37083 _ = b 37084 typ := &b.Func.Config.Types 37085 _ = typ 37086 // match: (Rsh64Ux8 <t> x y) 37087 // cond: 37088 // result: (CSEL {OpARM64LessThanU} (SRL <t> x (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 
37089 for { 37090 t := v.Type 37091 _ = v.Args[1] 37092 x := v.Args[0] 37093 y := v.Args[1] 37094 v.reset(OpARM64CSEL) 37095 v.Aux = OpARM64LessThanU 37096 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37097 v0.AddArg(x) 37098 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37099 v1.AddArg(y) 37100 v0.AddArg(v1) 37101 v.AddArg(v0) 37102 v2 := b.NewValue0(v.Pos, OpConst64, t) 37103 v2.AuxInt = 0 37104 v.AddArg(v2) 37105 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37106 v3.AuxInt = 64 37107 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37108 v4.AddArg(y) 37109 v3.AddArg(v4) 37110 v.AddArg(v3) 37111 return true 37112 } 37113 } 37114 func rewriteValueARM64_OpRsh64x16_0(v *Value) bool { 37115 b := v.Block 37116 _ = b 37117 typ := &b.Func.Config.Types 37118 _ = typ 37119 // match: (Rsh64x16 x y) 37120 // cond: 37121 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 37122 for { 37123 _ = v.Args[1] 37124 x := v.Args[0] 37125 y := v.Args[1] 37126 v.reset(OpARM64SRA) 37127 v.AddArg(x) 37128 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37129 v0.Aux = OpARM64LessThanU 37130 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37131 v1.AddArg(y) 37132 v0.AddArg(v1) 37133 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37134 v2.AuxInt = 63 37135 v0.AddArg(v2) 37136 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37137 v3.AuxInt = 64 37138 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37139 v4.AddArg(y) 37140 v3.AddArg(v4) 37141 v0.AddArg(v3) 37142 v.AddArg(v0) 37143 return true 37144 } 37145 } 37146 func rewriteValueARM64_OpRsh64x32_0(v *Value) bool { 37147 b := v.Block 37148 _ = b 37149 typ := &b.Func.Config.Types 37150 _ = typ 37151 // match: (Rsh64x32 x y) 37152 // cond: 37153 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 37154 for { 37155 _ = v.Args[1] 37156 x := v.Args[0] 
37157 y := v.Args[1] 37158 v.reset(OpARM64SRA) 37159 v.AddArg(x) 37160 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37161 v0.Aux = OpARM64LessThanU 37162 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37163 v1.AddArg(y) 37164 v0.AddArg(v1) 37165 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37166 v2.AuxInt = 63 37167 v0.AddArg(v2) 37168 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37169 v3.AuxInt = 64 37170 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37171 v4.AddArg(y) 37172 v3.AddArg(v4) 37173 v0.AddArg(v3) 37174 v.AddArg(v0) 37175 return true 37176 } 37177 } 37178 func rewriteValueARM64_OpRsh64x64_0(v *Value) bool { 37179 b := v.Block 37180 _ = b 37181 // match: (Rsh64x64 x y) 37182 // cond: 37183 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 37184 for { 37185 _ = v.Args[1] 37186 x := v.Args[0] 37187 y := v.Args[1] 37188 v.reset(OpARM64SRA) 37189 v.AddArg(x) 37190 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37191 v0.Aux = OpARM64LessThanU 37192 v0.AddArg(y) 37193 v1 := b.NewValue0(v.Pos, OpConst64, y.Type) 37194 v1.AuxInt = 63 37195 v0.AddArg(v1) 37196 v2 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37197 v2.AuxInt = 64 37198 v2.AddArg(y) 37199 v0.AddArg(v2) 37200 v.AddArg(v0) 37201 return true 37202 } 37203 } 37204 func rewriteValueARM64_OpRsh64x8_0(v *Value) bool { 37205 b := v.Block 37206 _ = b 37207 typ := &b.Func.Config.Types 37208 _ = typ 37209 // match: (Rsh64x8 x y) 37210 // cond: 37211 // result: (SRA x (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 37212 for { 37213 _ = v.Args[1] 37214 x := v.Args[0] 37215 y := v.Args[1] 37216 v.reset(OpARM64SRA) 37217 v.AddArg(x) 37218 v0 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37219 v0.Aux = OpARM64LessThanU 37220 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37221 v1.AddArg(y) 37222 v0.AddArg(v1) 37223 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37224 
v2.AuxInt = 63 37225 v0.AddArg(v2) 37226 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37227 v3.AuxInt = 64 37228 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37229 v4.AddArg(y) 37230 v3.AddArg(v4) 37231 v0.AddArg(v3) 37232 v.AddArg(v0) 37233 return true 37234 } 37235 } 37236 func rewriteValueARM64_OpRsh8Ux16_0(v *Value) bool { 37237 b := v.Block 37238 _ = b 37239 typ := &b.Func.Config.Types 37240 _ = typ 37241 // match: (Rsh8Ux16 <t> x y) 37242 // cond: 37243 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt16to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt16to64 y))) 37244 for { 37245 t := v.Type 37246 _ = v.Args[1] 37247 x := v.Args[0] 37248 y := v.Args[1] 37249 v.reset(OpARM64CSEL) 37250 v.Aux = OpARM64LessThanU 37251 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37252 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37253 v1.AddArg(x) 37254 v0.AddArg(v1) 37255 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37256 v2.AddArg(y) 37257 v0.AddArg(v2) 37258 v.AddArg(v0) 37259 v3 := b.NewValue0(v.Pos, OpConst64, t) 37260 v3.AuxInt = 0 37261 v.AddArg(v3) 37262 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37263 v4.AuxInt = 64 37264 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37265 v5.AddArg(y) 37266 v4.AddArg(v5) 37267 v.AddArg(v4) 37268 return true 37269 } 37270 } 37271 func rewriteValueARM64_OpRsh8Ux32_0(v *Value) bool { 37272 b := v.Block 37273 _ = b 37274 typ := &b.Func.Config.Types 37275 _ = typ 37276 // match: (Rsh8Ux32 <t> x y) 37277 // cond: 37278 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt32to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt32to64 y))) 37279 for { 37280 t := v.Type 37281 _ = v.Args[1] 37282 x := v.Args[0] 37283 y := v.Args[1] 37284 v.reset(OpARM64CSEL) 37285 v.Aux = OpARM64LessThanU 37286 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37287 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37288 v1.AddArg(x) 37289 v0.AddArg(v1) 37290 v2 := 
b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37291 v2.AddArg(y) 37292 v0.AddArg(v2) 37293 v.AddArg(v0) 37294 v3 := b.NewValue0(v.Pos, OpConst64, t) 37295 v3.AuxInt = 0 37296 v.AddArg(v3) 37297 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37298 v4.AuxInt = 64 37299 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37300 v5.AddArg(y) 37301 v4.AddArg(v5) 37302 v.AddArg(v4) 37303 return true 37304 } 37305 } 37306 func rewriteValueARM64_OpRsh8Ux64_0(v *Value) bool { 37307 b := v.Block 37308 _ = b 37309 typ := &b.Func.Config.Types 37310 _ = typ 37311 // match: (Rsh8Ux64 <t> x y) 37312 // cond: 37313 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) y) (Const64 <t> [0]) (CMPconst [64] y)) 37314 for { 37315 t := v.Type 37316 _ = v.Args[1] 37317 x := v.Args[0] 37318 y := v.Args[1] 37319 v.reset(OpARM64CSEL) 37320 v.Aux = OpARM64LessThanU 37321 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37322 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37323 v1.AddArg(x) 37324 v0.AddArg(v1) 37325 v0.AddArg(y) 37326 v.AddArg(v0) 37327 v2 := b.NewValue0(v.Pos, OpConst64, t) 37328 v2.AuxInt = 0 37329 v.AddArg(v2) 37330 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37331 v3.AuxInt = 64 37332 v3.AddArg(y) 37333 v.AddArg(v3) 37334 return true 37335 } 37336 } 37337 func rewriteValueARM64_OpRsh8Ux8_0(v *Value) bool { 37338 b := v.Block 37339 _ = b 37340 typ := &b.Func.Config.Types 37341 _ = typ 37342 // match: (Rsh8Ux8 <t> x y) 37343 // cond: 37344 // result: (CSEL {OpARM64LessThanU} (SRL <t> (ZeroExt8to64 x) (ZeroExt8to64 y)) (Const64 <t> [0]) (CMPconst [64] (ZeroExt8to64 y))) 37345 for { 37346 t := v.Type 37347 _ = v.Args[1] 37348 x := v.Args[0] 37349 y := v.Args[1] 37350 v.reset(OpARM64CSEL) 37351 v.Aux = OpARM64LessThanU 37352 v0 := b.NewValue0(v.Pos, OpARM64SRL, t) 37353 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37354 v1.AddArg(x) 37355 v0.AddArg(v1) 37356 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37357 v2.AddArg(y) 37358 
v0.AddArg(v2) 37359 v.AddArg(v0) 37360 v3 := b.NewValue0(v.Pos, OpConst64, t) 37361 v3.AuxInt = 0 37362 v.AddArg(v3) 37363 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37364 v4.AuxInt = 64 37365 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37366 v5.AddArg(y) 37367 v4.AddArg(v5) 37368 v.AddArg(v4) 37369 return true 37370 } 37371 } 37372 func rewriteValueARM64_OpRsh8x16_0(v *Value) bool { 37373 b := v.Block 37374 _ = b 37375 typ := &b.Func.Config.Types 37376 _ = typ 37377 // match: (Rsh8x16 x y) 37378 // cond: 37379 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt16to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt16to64 y)))) 37380 for { 37381 _ = v.Args[1] 37382 x := v.Args[0] 37383 y := v.Args[1] 37384 v.reset(OpARM64SRA) 37385 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37386 v0.AddArg(x) 37387 v.AddArg(v0) 37388 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37389 v1.Aux = OpARM64LessThanU 37390 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37391 v2.AddArg(y) 37392 v1.AddArg(v2) 37393 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37394 v3.AuxInt = 63 37395 v1.AddArg(v3) 37396 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37397 v4.AuxInt = 64 37398 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 37399 v5.AddArg(y) 37400 v4.AddArg(v5) 37401 v1.AddArg(v4) 37402 v.AddArg(v1) 37403 return true 37404 } 37405 } 37406 func rewriteValueARM64_OpRsh8x32_0(v *Value) bool { 37407 b := v.Block 37408 _ = b 37409 typ := &b.Func.Config.Types 37410 _ = typ 37411 // match: (Rsh8x32 x y) 37412 // cond: 37413 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt32to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt32to64 y)))) 37414 for { 37415 _ = v.Args[1] 37416 x := v.Args[0] 37417 y := v.Args[1] 37418 v.reset(OpARM64SRA) 37419 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37420 v0.AddArg(x) 37421 v.AddArg(v0) 37422 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 
37423 v1.Aux = OpARM64LessThanU 37424 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37425 v2.AddArg(y) 37426 v1.AddArg(v2) 37427 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37428 v3.AuxInt = 63 37429 v1.AddArg(v3) 37430 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37431 v4.AuxInt = 64 37432 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 37433 v5.AddArg(y) 37434 v4.AddArg(v5) 37435 v1.AddArg(v4) 37436 v.AddArg(v1) 37437 return true 37438 } 37439 } 37440 func rewriteValueARM64_OpRsh8x64_0(v *Value) bool { 37441 b := v.Block 37442 _ = b 37443 typ := &b.Func.Config.Types 37444 _ = typ 37445 // match: (Rsh8x64 x y) 37446 // cond: 37447 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> y (Const64 <y.Type> [63]) (CMPconst [64] y))) 37448 for { 37449 _ = v.Args[1] 37450 x := v.Args[0] 37451 y := v.Args[1] 37452 v.reset(OpARM64SRA) 37453 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37454 v0.AddArg(x) 37455 v.AddArg(v0) 37456 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37457 v1.Aux = OpARM64LessThanU 37458 v1.AddArg(y) 37459 v2 := b.NewValue0(v.Pos, OpConst64, y.Type) 37460 v2.AuxInt = 63 37461 v1.AddArg(v2) 37462 v3 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37463 v3.AuxInt = 64 37464 v3.AddArg(y) 37465 v1.AddArg(v3) 37466 v.AddArg(v1) 37467 return true 37468 } 37469 } 37470 func rewriteValueARM64_OpRsh8x8_0(v *Value) bool { 37471 b := v.Block 37472 _ = b 37473 typ := &b.Func.Config.Types 37474 _ = typ 37475 // match: (Rsh8x8 x y) 37476 // cond: 37477 // result: (SRA (SignExt8to64 x) (CSEL {OpARM64LessThanU} <y.Type> (ZeroExt8to64 y) (Const64 <y.Type> [63]) (CMPconst [64] (ZeroExt8to64 y)))) 37478 for { 37479 _ = v.Args[1] 37480 x := v.Args[0] 37481 y := v.Args[1] 37482 v.reset(OpARM64SRA) 37483 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 37484 v0.AddArg(x) 37485 v.AddArg(v0) 37486 v1 := b.NewValue0(v.Pos, OpARM64CSEL, y.Type) 37487 v1.Aux = OpARM64LessThanU 37488 v2 := b.NewValue0(v.Pos, 
OpZeroExt8to64, typ.UInt64) 37489 v2.AddArg(y) 37490 v1.AddArg(v2) 37491 v3 := b.NewValue0(v.Pos, OpConst64, y.Type) 37492 v3.AuxInt = 63 37493 v1.AddArg(v3) 37494 v4 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags) 37495 v4.AuxInt = 64 37496 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 37497 v5.AddArg(y) 37498 v4.AddArg(v5) 37499 v1.AddArg(v4) 37500 v.AddArg(v1) 37501 return true 37502 } 37503 } 37504 func rewriteValueARM64_OpSignExt16to32_0(v *Value) bool { 37505 // match: (SignExt16to32 x) 37506 // cond: 37507 // result: (MOVHreg x) 37508 for { 37509 x := v.Args[0] 37510 v.reset(OpARM64MOVHreg) 37511 v.AddArg(x) 37512 return true 37513 } 37514 } 37515 func rewriteValueARM64_OpSignExt16to64_0(v *Value) bool { 37516 // match: (SignExt16to64 x) 37517 // cond: 37518 // result: (MOVHreg x) 37519 for { 37520 x := v.Args[0] 37521 v.reset(OpARM64MOVHreg) 37522 v.AddArg(x) 37523 return true 37524 } 37525 } 37526 func rewriteValueARM64_OpSignExt32to64_0(v *Value) bool { 37527 // match: (SignExt32to64 x) 37528 // cond: 37529 // result: (MOVWreg x) 37530 for { 37531 x := v.Args[0] 37532 v.reset(OpARM64MOVWreg) 37533 v.AddArg(x) 37534 return true 37535 } 37536 } 37537 func rewriteValueARM64_OpSignExt8to16_0(v *Value) bool { 37538 // match: (SignExt8to16 x) 37539 // cond: 37540 // result: (MOVBreg x) 37541 for { 37542 x := v.Args[0] 37543 v.reset(OpARM64MOVBreg) 37544 v.AddArg(x) 37545 return true 37546 } 37547 } 37548 func rewriteValueARM64_OpSignExt8to32_0(v *Value) bool { 37549 // match: (SignExt8to32 x) 37550 // cond: 37551 // result: (MOVBreg x) 37552 for { 37553 x := v.Args[0] 37554 v.reset(OpARM64MOVBreg) 37555 v.AddArg(x) 37556 return true 37557 } 37558 } 37559 func rewriteValueARM64_OpSignExt8to64_0(v *Value) bool { 37560 // match: (SignExt8to64 x) 37561 // cond: 37562 // result: (MOVBreg x) 37563 for { 37564 x := v.Args[0] 37565 v.reset(OpARM64MOVBreg) 37566 v.AddArg(x) 37567 return true 37568 } 37569 } 37570 func rewriteValueARM64_OpSlicemask_0(v 
*Value) bool { 37571 b := v.Block 37572 _ = b 37573 // match: (Slicemask <t> x) 37574 // cond: 37575 // result: (SRAconst (NEG <t> x) [63]) 37576 for { 37577 t := v.Type 37578 x := v.Args[0] 37579 v.reset(OpARM64SRAconst) 37580 v.AuxInt = 63 37581 v0 := b.NewValue0(v.Pos, OpARM64NEG, t) 37582 v0.AddArg(x) 37583 v.AddArg(v0) 37584 return true 37585 } 37586 } 37587 func rewriteValueARM64_OpSqrt_0(v *Value) bool { 37588 // match: (Sqrt x) 37589 // cond: 37590 // result: (FSQRTD x) 37591 for { 37592 x := v.Args[0] 37593 v.reset(OpARM64FSQRTD) 37594 v.AddArg(x) 37595 return true 37596 } 37597 } 37598 func rewriteValueARM64_OpStaticCall_0(v *Value) bool { 37599 // match: (StaticCall [argwid] {target} mem) 37600 // cond: 37601 // result: (CALLstatic [argwid] {target} mem) 37602 for { 37603 argwid := v.AuxInt 37604 target := v.Aux 37605 mem := v.Args[0] 37606 v.reset(OpARM64CALLstatic) 37607 v.AuxInt = argwid 37608 v.Aux = target 37609 v.AddArg(mem) 37610 return true 37611 } 37612 } 37613 func rewriteValueARM64_OpStore_0(v *Value) bool { 37614 // match: (Store {t} ptr val mem) 37615 // cond: t.(*types.Type).Size() == 1 37616 // result: (MOVBstore ptr val mem) 37617 for { 37618 t := v.Aux 37619 _ = v.Args[2] 37620 ptr := v.Args[0] 37621 val := v.Args[1] 37622 mem := v.Args[2] 37623 if !(t.(*types.Type).Size() == 1) { 37624 break 37625 } 37626 v.reset(OpARM64MOVBstore) 37627 v.AddArg(ptr) 37628 v.AddArg(val) 37629 v.AddArg(mem) 37630 return true 37631 } 37632 // match: (Store {t} ptr val mem) 37633 // cond: t.(*types.Type).Size() == 2 37634 // result: (MOVHstore ptr val mem) 37635 for { 37636 t := v.Aux 37637 _ = v.Args[2] 37638 ptr := v.Args[0] 37639 val := v.Args[1] 37640 mem := v.Args[2] 37641 if !(t.(*types.Type).Size() == 2) { 37642 break 37643 } 37644 v.reset(OpARM64MOVHstore) 37645 v.AddArg(ptr) 37646 v.AddArg(val) 37647 v.AddArg(mem) 37648 return true 37649 } 37650 // match: (Store {t} ptr val mem) 37651 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 
37652 // result: (MOVWstore ptr val mem) 37653 for { 37654 t := v.Aux 37655 _ = v.Args[2] 37656 ptr := v.Args[0] 37657 val := v.Args[1] 37658 mem := v.Args[2] 37659 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 37660 break 37661 } 37662 v.reset(OpARM64MOVWstore) 37663 v.AddArg(ptr) 37664 v.AddArg(val) 37665 v.AddArg(mem) 37666 return true 37667 } 37668 // match: (Store {t} ptr val mem) 37669 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 37670 // result: (MOVDstore ptr val mem) 37671 for { 37672 t := v.Aux 37673 _ = v.Args[2] 37674 ptr := v.Args[0] 37675 val := v.Args[1] 37676 mem := v.Args[2] 37677 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 37678 break 37679 } 37680 v.reset(OpARM64MOVDstore) 37681 v.AddArg(ptr) 37682 v.AddArg(val) 37683 v.AddArg(mem) 37684 return true 37685 } 37686 // match: (Store {t} ptr val mem) 37687 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 37688 // result: (FMOVSstore ptr val mem) 37689 for { 37690 t := v.Aux 37691 _ = v.Args[2] 37692 ptr := v.Args[0] 37693 val := v.Args[1] 37694 mem := v.Args[2] 37695 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 37696 break 37697 } 37698 v.reset(OpARM64FMOVSstore) 37699 v.AddArg(ptr) 37700 v.AddArg(val) 37701 v.AddArg(mem) 37702 return true 37703 } 37704 // match: (Store {t} ptr val mem) 37705 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 37706 // result: (FMOVDstore ptr val mem) 37707 for { 37708 t := v.Aux 37709 _ = v.Args[2] 37710 ptr := v.Args[0] 37711 val := v.Args[1] 37712 mem := v.Args[2] 37713 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 37714 break 37715 } 37716 v.reset(OpARM64FMOVDstore) 37717 v.AddArg(ptr) 37718 v.AddArg(val) 37719 v.AddArg(mem) 37720 return true 37721 } 37722 return false 37723 } 37724 func rewriteValueARM64_OpSub16_0(v *Value) bool { 37725 // match: (Sub16 x y) 37726 // cond: 37727 // result: (SUB x y) 37728 for { 37729 _ = v.Args[1] 37730 x := v.Args[0] 
37731 y := v.Args[1] 37732 v.reset(OpARM64SUB) 37733 v.AddArg(x) 37734 v.AddArg(y) 37735 return true 37736 } 37737 } 37738 func rewriteValueARM64_OpSub32_0(v *Value) bool { 37739 // match: (Sub32 x y) 37740 // cond: 37741 // result: (SUB x y) 37742 for { 37743 _ = v.Args[1] 37744 x := v.Args[0] 37745 y := v.Args[1] 37746 v.reset(OpARM64SUB) 37747 v.AddArg(x) 37748 v.AddArg(y) 37749 return true 37750 } 37751 } 37752 func rewriteValueARM64_OpSub32F_0(v *Value) bool { 37753 // match: (Sub32F x y) 37754 // cond: 37755 // result: (FSUBS x y) 37756 for { 37757 _ = v.Args[1] 37758 x := v.Args[0] 37759 y := v.Args[1] 37760 v.reset(OpARM64FSUBS) 37761 v.AddArg(x) 37762 v.AddArg(y) 37763 return true 37764 } 37765 } 37766 func rewriteValueARM64_OpSub64_0(v *Value) bool { 37767 // match: (Sub64 x y) 37768 // cond: 37769 // result: (SUB x y) 37770 for { 37771 _ = v.Args[1] 37772 x := v.Args[0] 37773 y := v.Args[1] 37774 v.reset(OpARM64SUB) 37775 v.AddArg(x) 37776 v.AddArg(y) 37777 return true 37778 } 37779 } 37780 func rewriteValueARM64_OpSub64F_0(v *Value) bool { 37781 // match: (Sub64F x y) 37782 // cond: 37783 // result: (FSUBD x y) 37784 for { 37785 _ = v.Args[1] 37786 x := v.Args[0] 37787 y := v.Args[1] 37788 v.reset(OpARM64FSUBD) 37789 v.AddArg(x) 37790 v.AddArg(y) 37791 return true 37792 } 37793 } 37794 func rewriteValueARM64_OpSub8_0(v *Value) bool { 37795 // match: (Sub8 x y) 37796 // cond: 37797 // result: (SUB x y) 37798 for { 37799 _ = v.Args[1] 37800 x := v.Args[0] 37801 y := v.Args[1] 37802 v.reset(OpARM64SUB) 37803 v.AddArg(x) 37804 v.AddArg(y) 37805 return true 37806 } 37807 } 37808 func rewriteValueARM64_OpSubPtr_0(v *Value) bool { 37809 // match: (SubPtr x y) 37810 // cond: 37811 // result: (SUB x y) 37812 for { 37813 _ = v.Args[1] 37814 x := v.Args[0] 37815 y := v.Args[1] 37816 v.reset(OpARM64SUB) 37817 v.AddArg(x) 37818 v.AddArg(y) 37819 return true 37820 } 37821 } 37822 func rewriteValueARM64_OpTrunc_0(v *Value) bool { 37823 // match: (Trunc x) 37824 // 
cond: 37825 // result: (FRINTZD x) 37826 for { 37827 x := v.Args[0] 37828 v.reset(OpARM64FRINTZD) 37829 v.AddArg(x) 37830 return true 37831 } 37832 } 37833 func rewriteValueARM64_OpTrunc16to8_0(v *Value) bool { 37834 // match: (Trunc16to8 x) 37835 // cond: 37836 // result: x 37837 for { 37838 x := v.Args[0] 37839 v.reset(OpCopy) 37840 v.Type = x.Type 37841 v.AddArg(x) 37842 return true 37843 } 37844 } 37845 func rewriteValueARM64_OpTrunc32to16_0(v *Value) bool { 37846 // match: (Trunc32to16 x) 37847 // cond: 37848 // result: x 37849 for { 37850 x := v.Args[0] 37851 v.reset(OpCopy) 37852 v.Type = x.Type 37853 v.AddArg(x) 37854 return true 37855 } 37856 } 37857 func rewriteValueARM64_OpTrunc32to8_0(v *Value) bool { 37858 // match: (Trunc32to8 x) 37859 // cond: 37860 // result: x 37861 for { 37862 x := v.Args[0] 37863 v.reset(OpCopy) 37864 v.Type = x.Type 37865 v.AddArg(x) 37866 return true 37867 } 37868 } 37869 func rewriteValueARM64_OpTrunc64to16_0(v *Value) bool { 37870 // match: (Trunc64to16 x) 37871 // cond: 37872 // result: x 37873 for { 37874 x := v.Args[0] 37875 v.reset(OpCopy) 37876 v.Type = x.Type 37877 v.AddArg(x) 37878 return true 37879 } 37880 } 37881 func rewriteValueARM64_OpTrunc64to32_0(v *Value) bool { 37882 // match: (Trunc64to32 x) 37883 // cond: 37884 // result: x 37885 for { 37886 x := v.Args[0] 37887 v.reset(OpCopy) 37888 v.Type = x.Type 37889 v.AddArg(x) 37890 return true 37891 } 37892 } 37893 func rewriteValueARM64_OpTrunc64to8_0(v *Value) bool { 37894 // match: (Trunc64to8 x) 37895 // cond: 37896 // result: x 37897 for { 37898 x := v.Args[0] 37899 v.reset(OpCopy) 37900 v.Type = x.Type 37901 v.AddArg(x) 37902 return true 37903 } 37904 } 37905 func rewriteValueARM64_OpWB_0(v *Value) bool { 37906 // match: (WB {fn} destptr srcptr mem) 37907 // cond: 37908 // result: (LoweredWB {fn} destptr srcptr mem) 37909 for { 37910 fn := v.Aux 37911 _ = v.Args[2] 37912 destptr := v.Args[0] 37913 srcptr := v.Args[1] 37914 mem := v.Args[2] 37915 
v.reset(OpARM64LoweredWB) 37916 v.Aux = fn 37917 v.AddArg(destptr) 37918 v.AddArg(srcptr) 37919 v.AddArg(mem) 37920 return true 37921 } 37922 } 37923 func rewriteValueARM64_OpXor16_0(v *Value) bool { 37924 // match: (Xor16 x y) 37925 // cond: 37926 // result: (XOR x y) 37927 for { 37928 _ = v.Args[1] 37929 x := v.Args[0] 37930 y := v.Args[1] 37931 v.reset(OpARM64XOR) 37932 v.AddArg(x) 37933 v.AddArg(y) 37934 return true 37935 } 37936 } 37937 func rewriteValueARM64_OpXor32_0(v *Value) bool { 37938 // match: (Xor32 x y) 37939 // cond: 37940 // result: (XOR x y) 37941 for { 37942 _ = v.Args[1] 37943 x := v.Args[0] 37944 y := v.Args[1] 37945 v.reset(OpARM64XOR) 37946 v.AddArg(x) 37947 v.AddArg(y) 37948 return true 37949 } 37950 } 37951 func rewriteValueARM64_OpXor64_0(v *Value) bool { 37952 // match: (Xor64 x y) 37953 // cond: 37954 // result: (XOR x y) 37955 for { 37956 _ = v.Args[1] 37957 x := v.Args[0] 37958 y := v.Args[1] 37959 v.reset(OpARM64XOR) 37960 v.AddArg(x) 37961 v.AddArg(y) 37962 return true 37963 } 37964 } 37965 func rewriteValueARM64_OpXor8_0(v *Value) bool { 37966 // match: (Xor8 x y) 37967 // cond: 37968 // result: (XOR x y) 37969 for { 37970 _ = v.Args[1] 37971 x := v.Args[0] 37972 y := v.Args[1] 37973 v.reset(OpARM64XOR) 37974 v.AddArg(x) 37975 v.AddArg(y) 37976 return true 37977 } 37978 } 37979 func rewriteValueARM64_OpZero_0(v *Value) bool { 37980 b := v.Block 37981 _ = b 37982 typ := &b.Func.Config.Types 37983 _ = typ 37984 // match: (Zero [0] _ mem) 37985 // cond: 37986 // result: mem 37987 for { 37988 if v.AuxInt != 0 { 37989 break 37990 } 37991 _ = v.Args[1] 37992 mem := v.Args[1] 37993 v.reset(OpCopy) 37994 v.Type = mem.Type 37995 v.AddArg(mem) 37996 return true 37997 } 37998 // match: (Zero [1] ptr mem) 37999 // cond: 38000 // result: (MOVBstore ptr (MOVDconst [0]) mem) 38001 for { 38002 if v.AuxInt != 1 { 38003 break 38004 } 38005 _ = v.Args[1] 38006 ptr := v.Args[0] 38007 mem := v.Args[1] 38008 v.reset(OpARM64MOVBstore) 38009 v.AddArg(ptr) 
38010 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38011 v0.AuxInt = 0 38012 v.AddArg(v0) 38013 v.AddArg(mem) 38014 return true 38015 } 38016 // match: (Zero [2] ptr mem) 38017 // cond: 38018 // result: (MOVHstore ptr (MOVDconst [0]) mem) 38019 for { 38020 if v.AuxInt != 2 { 38021 break 38022 } 38023 _ = v.Args[1] 38024 ptr := v.Args[0] 38025 mem := v.Args[1] 38026 v.reset(OpARM64MOVHstore) 38027 v.AddArg(ptr) 38028 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38029 v0.AuxInt = 0 38030 v.AddArg(v0) 38031 v.AddArg(mem) 38032 return true 38033 } 38034 // match: (Zero [4] ptr mem) 38035 // cond: 38036 // result: (MOVWstore ptr (MOVDconst [0]) mem) 38037 for { 38038 if v.AuxInt != 4 { 38039 break 38040 } 38041 _ = v.Args[1] 38042 ptr := v.Args[0] 38043 mem := v.Args[1] 38044 v.reset(OpARM64MOVWstore) 38045 v.AddArg(ptr) 38046 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38047 v0.AuxInt = 0 38048 v.AddArg(v0) 38049 v.AddArg(mem) 38050 return true 38051 } 38052 // match: (Zero [8] ptr mem) 38053 // cond: 38054 // result: (MOVDstore ptr (MOVDconst [0]) mem) 38055 for { 38056 if v.AuxInt != 8 { 38057 break 38058 } 38059 _ = v.Args[1] 38060 ptr := v.Args[0] 38061 mem := v.Args[1] 38062 v.reset(OpARM64MOVDstore) 38063 v.AddArg(ptr) 38064 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38065 v0.AuxInt = 0 38066 v.AddArg(v0) 38067 v.AddArg(mem) 38068 return true 38069 } 38070 // match: (Zero [3] ptr mem) 38071 // cond: 38072 // result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)) 38073 for { 38074 if v.AuxInt != 3 { 38075 break 38076 } 38077 _ = v.Args[1] 38078 ptr := v.Args[0] 38079 mem := v.Args[1] 38080 v.reset(OpARM64MOVBstore) 38081 v.AuxInt = 2 38082 v.AddArg(ptr) 38083 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38084 v0.AuxInt = 0 38085 v.AddArg(v0) 38086 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38087 v1.AddArg(ptr) 38088 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 
38089 v2.AuxInt = 0 38090 v1.AddArg(v2) 38091 v1.AddArg(mem) 38092 v.AddArg(v1) 38093 return true 38094 } 38095 // match: (Zero [5] ptr mem) 38096 // cond: 38097 // result: (MOVBstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 38098 for { 38099 if v.AuxInt != 5 { 38100 break 38101 } 38102 _ = v.Args[1] 38103 ptr := v.Args[0] 38104 mem := v.Args[1] 38105 v.reset(OpARM64MOVBstore) 38106 v.AuxInt = 4 38107 v.AddArg(ptr) 38108 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38109 v0.AuxInt = 0 38110 v.AddArg(v0) 38111 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38112 v1.AddArg(ptr) 38113 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38114 v2.AuxInt = 0 38115 v1.AddArg(v2) 38116 v1.AddArg(mem) 38117 v.AddArg(v1) 38118 return true 38119 } 38120 // match: (Zero [6] ptr mem) 38121 // cond: 38122 // result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)) 38123 for { 38124 if v.AuxInt != 6 { 38125 break 38126 } 38127 _ = v.Args[1] 38128 ptr := v.Args[0] 38129 mem := v.Args[1] 38130 v.reset(OpARM64MOVHstore) 38131 v.AuxInt = 4 38132 v.AddArg(ptr) 38133 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38134 v0.AuxInt = 0 38135 v.AddArg(v0) 38136 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38137 v1.AddArg(ptr) 38138 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38139 v2.AuxInt = 0 38140 v1.AddArg(v2) 38141 v1.AddArg(mem) 38142 v.AddArg(v1) 38143 return true 38144 } 38145 // match: (Zero [7] ptr mem) 38146 // cond: 38147 // result: (MOVBstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))) 38148 for { 38149 if v.AuxInt != 7 { 38150 break 38151 } 38152 _ = v.Args[1] 38153 ptr := v.Args[0] 38154 mem := v.Args[1] 38155 v.reset(OpARM64MOVBstore) 38156 v.AuxInt = 6 38157 v.AddArg(ptr) 38158 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38159 v0.AuxInt = 0 38160 v.AddArg(v0) 38161 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, 
types.TypeMem) 38162 v1.AuxInt = 4 38163 v1.AddArg(ptr) 38164 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38165 v2.AuxInt = 0 38166 v1.AddArg(v2) 38167 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38168 v3.AddArg(ptr) 38169 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38170 v4.AuxInt = 0 38171 v3.AddArg(v4) 38172 v3.AddArg(mem) 38173 v1.AddArg(v3) 38174 v.AddArg(v1) 38175 return true 38176 } 38177 // match: (Zero [9] ptr mem) 38178 // cond: 38179 // result: (MOVBstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38180 for { 38181 if v.AuxInt != 9 { 38182 break 38183 } 38184 _ = v.Args[1] 38185 ptr := v.Args[0] 38186 mem := v.Args[1] 38187 v.reset(OpARM64MOVBstore) 38188 v.AuxInt = 8 38189 v.AddArg(ptr) 38190 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38191 v0.AuxInt = 0 38192 v.AddArg(v0) 38193 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38194 v1.AddArg(ptr) 38195 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38196 v2.AuxInt = 0 38197 v1.AddArg(v2) 38198 v1.AddArg(mem) 38199 v.AddArg(v1) 38200 return true 38201 } 38202 return false 38203 } 38204 func rewriteValueARM64_OpZero_10(v *Value) bool { 38205 b := v.Block 38206 _ = b 38207 typ := &b.Func.Config.Types 38208 _ = typ 38209 // match: (Zero [10] ptr mem) 38210 // cond: 38211 // result: (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38212 for { 38213 if v.AuxInt != 10 { 38214 break 38215 } 38216 _ = v.Args[1] 38217 ptr := v.Args[0] 38218 mem := v.Args[1] 38219 v.reset(OpARM64MOVHstore) 38220 v.AuxInt = 8 38221 v.AddArg(ptr) 38222 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38223 v0.AuxInt = 0 38224 v.AddArg(v0) 38225 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38226 v1.AddArg(ptr) 38227 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38228 v2.AuxInt = 0 38229 v1.AddArg(v2) 38230 v1.AddArg(mem) 38231 v.AddArg(v1) 38232 return true 38233 } 38234 // match: (Zero [11] ptr mem) 
38235 // cond: 38236 // result: (MOVBstore [10] ptr (MOVDconst [0]) (MOVHstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38237 for { 38238 if v.AuxInt != 11 { 38239 break 38240 } 38241 _ = v.Args[1] 38242 ptr := v.Args[0] 38243 mem := v.Args[1] 38244 v.reset(OpARM64MOVBstore) 38245 v.AuxInt = 10 38246 v.AddArg(ptr) 38247 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38248 v0.AuxInt = 0 38249 v.AddArg(v0) 38250 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38251 v1.AuxInt = 8 38252 v1.AddArg(ptr) 38253 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38254 v2.AuxInt = 0 38255 v1.AddArg(v2) 38256 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38257 v3.AddArg(ptr) 38258 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38259 v4.AuxInt = 0 38260 v3.AddArg(v4) 38261 v3.AddArg(mem) 38262 v1.AddArg(v3) 38263 v.AddArg(v1) 38264 return true 38265 } 38266 // match: (Zero [12] ptr mem) 38267 // cond: 38268 // result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)) 38269 for { 38270 if v.AuxInt != 12 { 38271 break 38272 } 38273 _ = v.Args[1] 38274 ptr := v.Args[0] 38275 mem := v.Args[1] 38276 v.reset(OpARM64MOVWstore) 38277 v.AuxInt = 8 38278 v.AddArg(ptr) 38279 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38280 v0.AuxInt = 0 38281 v.AddArg(v0) 38282 v1 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38283 v1.AddArg(ptr) 38284 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38285 v2.AuxInt = 0 38286 v1.AddArg(v2) 38287 v1.AddArg(mem) 38288 v.AddArg(v1) 38289 return true 38290 } 38291 // match: (Zero [13] ptr mem) 38292 // cond: 38293 // result: (MOVBstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38294 for { 38295 if v.AuxInt != 13 { 38296 break 38297 } 38298 _ = v.Args[1] 38299 ptr := v.Args[0] 38300 mem := v.Args[1] 38301 v.reset(OpARM64MOVBstore) 38302 v.AuxInt = 12 38303 v.AddArg(ptr) 38304 v0 := 
b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38305 v0.AuxInt = 0 38306 v.AddArg(v0) 38307 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38308 v1.AuxInt = 8 38309 v1.AddArg(ptr) 38310 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38311 v2.AuxInt = 0 38312 v1.AddArg(v2) 38313 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38314 v3.AddArg(ptr) 38315 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38316 v4.AuxInt = 0 38317 v3.AddArg(v4) 38318 v3.AddArg(mem) 38319 v1.AddArg(v3) 38320 v.AddArg(v1) 38321 return true 38322 } 38323 // match: (Zero [14] ptr mem) 38324 // cond: 38325 // result: (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))) 38326 for { 38327 if v.AuxInt != 14 { 38328 break 38329 } 38330 _ = v.Args[1] 38331 ptr := v.Args[0] 38332 mem := v.Args[1] 38333 v.reset(OpARM64MOVHstore) 38334 v.AuxInt = 12 38335 v.AddArg(ptr) 38336 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38337 v0.AuxInt = 0 38338 v.AddArg(v0) 38339 v1 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38340 v1.AuxInt = 8 38341 v1.AddArg(ptr) 38342 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38343 v2.AuxInt = 0 38344 v1.AddArg(v2) 38345 v3 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38346 v3.AddArg(ptr) 38347 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38348 v4.AuxInt = 0 38349 v3.AddArg(v4) 38350 v3.AddArg(mem) 38351 v1.AddArg(v3) 38352 v.AddArg(v1) 38353 return true 38354 } 38355 // match: (Zero [15] ptr mem) 38356 // cond: 38357 // result: (MOVBstore [14] ptr (MOVDconst [0]) (MOVHstore [12] ptr (MOVDconst [0]) (MOVWstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))) 38358 for { 38359 if v.AuxInt != 15 { 38360 break 38361 } 38362 _ = v.Args[1] 38363 ptr := v.Args[0] 38364 mem := v.Args[1] 38365 v.reset(OpARM64MOVBstore) 38366 v.AuxInt = 14 38367 v.AddArg(ptr) 38368 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38369 
v0.AuxInt = 0 38370 v.AddArg(v0) 38371 v1 := b.NewValue0(v.Pos, OpARM64MOVHstore, types.TypeMem) 38372 v1.AuxInt = 12 38373 v1.AddArg(ptr) 38374 v2 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38375 v2.AuxInt = 0 38376 v1.AddArg(v2) 38377 v3 := b.NewValue0(v.Pos, OpARM64MOVWstore, types.TypeMem) 38378 v3.AuxInt = 8 38379 v3.AddArg(ptr) 38380 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38381 v4.AuxInt = 0 38382 v3.AddArg(v4) 38383 v5 := b.NewValue0(v.Pos, OpARM64MOVDstore, types.TypeMem) 38384 v5.AddArg(ptr) 38385 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38386 v6.AuxInt = 0 38387 v5.AddArg(v6) 38388 v5.AddArg(mem) 38389 v3.AddArg(v5) 38390 v1.AddArg(v3) 38391 v.AddArg(v1) 38392 return true 38393 } 38394 // match: (Zero [16] ptr mem) 38395 // cond: 38396 // result: (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem) 38397 for { 38398 if v.AuxInt != 16 { 38399 break 38400 } 38401 _ = v.Args[1] 38402 ptr := v.Args[0] 38403 mem := v.Args[1] 38404 v.reset(OpARM64STP) 38405 v.AuxInt = 0 38406 v.AddArg(ptr) 38407 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38408 v0.AuxInt = 0 38409 v.AddArg(v0) 38410 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38411 v1.AuxInt = 0 38412 v.AddArg(v1) 38413 v.AddArg(mem) 38414 return true 38415 } 38416 // match: (Zero [32] ptr mem) 38417 // cond: 38418 // result: (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)) 38419 for { 38420 if v.AuxInt != 32 { 38421 break 38422 } 38423 _ = v.Args[1] 38424 ptr := v.Args[0] 38425 mem := v.Args[1] 38426 v.reset(OpARM64STP) 38427 v.AuxInt = 16 38428 v.AddArg(ptr) 38429 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38430 v0.AuxInt = 0 38431 v.AddArg(v0) 38432 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38433 v1.AuxInt = 0 38434 v.AddArg(v1) 38435 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38436 v2.AuxInt = 0 38437 v2.AddArg(ptr) 38438 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, 
typ.UInt64) 38439 v3.AuxInt = 0 38440 v2.AddArg(v3) 38441 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38442 v4.AuxInt = 0 38443 v2.AddArg(v4) 38444 v2.AddArg(mem) 38445 v.AddArg(v2) 38446 return true 38447 } 38448 // match: (Zero [48] ptr mem) 38449 // cond: 38450 // result: (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem))) 38451 for { 38452 if v.AuxInt != 48 { 38453 break 38454 } 38455 _ = v.Args[1] 38456 ptr := v.Args[0] 38457 mem := v.Args[1] 38458 v.reset(OpARM64STP) 38459 v.AuxInt = 32 38460 v.AddArg(ptr) 38461 v0 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38462 v0.AuxInt = 0 38463 v.AddArg(v0) 38464 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38465 v1.AuxInt = 0 38466 v.AddArg(v1) 38467 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38468 v2.AuxInt = 16 38469 v2.AddArg(ptr) 38470 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38471 v3.AuxInt = 0 38472 v2.AddArg(v3) 38473 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38474 v4.AuxInt = 0 38475 v2.AddArg(v4) 38476 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38477 v5.AuxInt = 0 38478 v5.AddArg(ptr) 38479 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38480 v6.AuxInt = 0 38481 v5.AddArg(v6) 38482 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38483 v7.AuxInt = 0 38484 v5.AddArg(v7) 38485 v5.AddArg(mem) 38486 v2.AddArg(v5) 38487 v.AddArg(v2) 38488 return true 38489 } 38490 // match: (Zero [64] ptr mem) 38491 // cond: 38492 // result: (STP [48] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [32] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [16] ptr (MOVDconst [0]) (MOVDconst [0]) (STP [0] ptr (MOVDconst [0]) (MOVDconst [0]) mem)))) 38493 for { 38494 if v.AuxInt != 64 { 38495 break 38496 } 38497 _ = v.Args[1] 38498 ptr := v.Args[0] 38499 mem := v.Args[1] 38500 v.reset(OpARM64STP) 38501 v.AuxInt = 48 38502 v.AddArg(ptr) 38503 v0 := b.NewValue0(v.Pos, 
OpARM64MOVDconst, typ.UInt64) 38504 v0.AuxInt = 0 38505 v.AddArg(v0) 38506 v1 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38507 v1.AuxInt = 0 38508 v.AddArg(v1) 38509 v2 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38510 v2.AuxInt = 32 38511 v2.AddArg(ptr) 38512 v3 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38513 v3.AuxInt = 0 38514 v2.AddArg(v3) 38515 v4 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38516 v4.AuxInt = 0 38517 v2.AddArg(v4) 38518 v5 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38519 v5.AuxInt = 16 38520 v5.AddArg(ptr) 38521 v6 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38522 v6.AuxInt = 0 38523 v5.AddArg(v6) 38524 v7 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38525 v7.AuxInt = 0 38526 v5.AddArg(v7) 38527 v8 := b.NewValue0(v.Pos, OpARM64STP, types.TypeMem) 38528 v8.AuxInt = 0 38529 v8.AddArg(ptr) 38530 v9 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38531 v9.AuxInt = 0 38532 v8.AddArg(v9) 38533 v10 := b.NewValue0(v.Pos, OpARM64MOVDconst, typ.UInt64) 38534 v10.AuxInt = 0 38535 v8.AddArg(v10) 38536 v8.AddArg(mem) 38537 v5.AddArg(v8) 38538 v2.AddArg(v5) 38539 v.AddArg(v2) 38540 return true 38541 } 38542 return false 38543 } 38544 func rewriteValueARM64_OpZero_20(v *Value) bool { 38545 b := v.Block 38546 _ = b 38547 config := b.Func.Config 38548 _ = config 38549 // match: (Zero [s] ptr mem) 38550 // cond: s%16 != 0 && s%16 <= 8 && s > 16 38551 // result: (Zero [8] (OffPtr <ptr.Type> ptr [s-8]) (Zero [s-s%16] ptr mem)) 38552 for { 38553 s := v.AuxInt 38554 _ = v.Args[1] 38555 ptr := v.Args[0] 38556 mem := v.Args[1] 38557 if !(s%16 != 0 && s%16 <= 8 && s > 16) { 38558 break 38559 } 38560 v.reset(OpZero) 38561 v.AuxInt = 8 38562 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 38563 v0.AuxInt = s - 8 38564 v0.AddArg(ptr) 38565 v.AddArg(v0) 38566 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem) 38567 v1.AuxInt = s - s%16 38568 v1.AddArg(ptr) 38569 v1.AddArg(mem) 38570 v.AddArg(v1) 38571 return true 38572 } 
38573 // match: (Zero [s] ptr mem) 38574 // cond: s%16 != 0 && s%16 > 8 && s > 16 38575 // result: (Zero [16] (OffPtr <ptr.Type> ptr [s-16]) (Zero [s-s%16] ptr mem)) 38576 for { 38577 s := v.AuxInt 38578 _ = v.Args[1] 38579 ptr := v.Args[0] 38580 mem := v.Args[1] 38581 if !(s%16 != 0 && s%16 > 8 && s > 16) { 38582 break 38583 } 38584 v.reset(OpZero) 38585 v.AuxInt = 16 38586 v0 := b.NewValue0(v.Pos, OpOffPtr, ptr.Type) 38587 v0.AuxInt = s - 16 38588 v0.AddArg(ptr) 38589 v.AddArg(v0) 38590 v1 := b.NewValue0(v.Pos, OpZero, types.TypeMem) 38591 v1.AuxInt = s - s%16 38592 v1.AddArg(ptr) 38593 v1.AddArg(mem) 38594 v.AddArg(v1) 38595 return true 38596 } 38597 // match: (Zero [s] ptr mem) 38598 // cond: s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice 38599 // result: (DUFFZERO [4 * (64 - s/16)] ptr mem) 38600 for { 38601 s := v.AuxInt 38602 _ = v.Args[1] 38603 ptr := v.Args[0] 38604 mem := v.Args[1] 38605 if !(s%16 == 0 && s > 64 && s <= 16*64 && !config.noDuffDevice) { 38606 break 38607 } 38608 v.reset(OpARM64DUFFZERO) 38609 v.AuxInt = 4 * (64 - s/16) 38610 v.AddArg(ptr) 38611 v.AddArg(mem) 38612 return true 38613 } 38614 // match: (Zero [s] ptr mem) 38615 // cond: s%16 == 0 && (s > 16*64 || config.noDuffDevice) 38616 // result: (LoweredZero ptr (ADDconst <ptr.Type> [s-16] ptr) mem) 38617 for { 38618 s := v.AuxInt 38619 _ = v.Args[1] 38620 ptr := v.Args[0] 38621 mem := v.Args[1] 38622 if !(s%16 == 0 && (s > 16*64 || config.noDuffDevice)) { 38623 break 38624 } 38625 v.reset(OpARM64LoweredZero) 38626 v.AddArg(ptr) 38627 v0 := b.NewValue0(v.Pos, OpARM64ADDconst, ptr.Type) 38628 v0.AuxInt = s - 16 38629 v0.AddArg(ptr) 38630 v.AddArg(v0) 38631 v.AddArg(mem) 38632 return true 38633 } 38634 return false 38635 } 38636 func rewriteValueARM64_OpZeroExt16to32_0(v *Value) bool { 38637 // match: (ZeroExt16to32 x) 38638 // cond: 38639 // result: (MOVHUreg x) 38640 for { 38641 x := v.Args[0] 38642 v.reset(OpARM64MOVHUreg) 38643 v.AddArg(x) 38644 return true 38645 } 38646 } 
38647 func rewriteValueARM64_OpZeroExt16to64_0(v *Value) bool { 38648 // match: (ZeroExt16to64 x) 38649 // cond: 38650 // result: (MOVHUreg x) 38651 for { 38652 x := v.Args[0] 38653 v.reset(OpARM64MOVHUreg) 38654 v.AddArg(x) 38655 return true 38656 } 38657 } 38658 func rewriteValueARM64_OpZeroExt32to64_0(v *Value) bool { 38659 // match: (ZeroExt32to64 x) 38660 // cond: 38661 // result: (MOVWUreg x) 38662 for { 38663 x := v.Args[0] 38664 v.reset(OpARM64MOVWUreg) 38665 v.AddArg(x) 38666 return true 38667 } 38668 } 38669 func rewriteValueARM64_OpZeroExt8to16_0(v *Value) bool { 38670 // match: (ZeroExt8to16 x) 38671 // cond: 38672 // result: (MOVBUreg x) 38673 for { 38674 x := v.Args[0] 38675 v.reset(OpARM64MOVBUreg) 38676 v.AddArg(x) 38677 return true 38678 } 38679 } 38680 func rewriteValueARM64_OpZeroExt8to32_0(v *Value) bool { 38681 // match: (ZeroExt8to32 x) 38682 // cond: 38683 // result: (MOVBUreg x) 38684 for { 38685 x := v.Args[0] 38686 v.reset(OpARM64MOVBUreg) 38687 v.AddArg(x) 38688 return true 38689 } 38690 } 38691 func rewriteValueARM64_OpZeroExt8to64_0(v *Value) bool { 38692 // match: (ZeroExt8to64 x) 38693 // cond: 38694 // result: (MOVBUreg x) 38695 for { 38696 x := v.Args[0] 38697 v.reset(OpARM64MOVBUreg) 38698 v.AddArg(x) 38699 return true 38700 } 38701 } 38702 func rewriteBlockARM64(b *Block) bool { 38703 config := b.Func.Config 38704 _ = config 38705 fe := b.Func.fe 38706 _ = fe 38707 typ := &config.Types 38708 _ = typ 38709 switch b.Kind { 38710 case BlockARM64EQ: 38711 // match: (EQ (CMPWconst [0] x:(ANDconst [c] y)) yes no) 38712 // cond: x.Uses == 1 38713 // result: (EQ (TSTWconst [c] y) yes no) 38714 for { 38715 v := b.Control 38716 if v.Op != OpARM64CMPWconst { 38717 break 38718 } 38719 if v.AuxInt != 0 { 38720 break 38721 } 38722 x := v.Args[0] 38723 if x.Op != OpARM64ANDconst { 38724 break 38725 } 38726 c := x.AuxInt 38727 y := x.Args[0] 38728 if !(x.Uses == 1) { 38729 break 38730 } 38731 b.Kind = BlockARM64EQ 38732 v0 := b.NewValue0(v.Pos, 
OpARM64TSTWconst, types.TypeFlags) 38733 v0.AuxInt = c 38734 v0.AddArg(y) 38735 b.SetControl(v0) 38736 b.Aux = nil 38737 return true 38738 } 38739 // match: (EQ (CMPconst [0] z:(AND x y)) yes no) 38740 // cond: z.Uses == 1 38741 // result: (EQ (TST x y) yes no) 38742 for { 38743 v := b.Control 38744 if v.Op != OpARM64CMPconst { 38745 break 38746 } 38747 if v.AuxInt != 0 { 38748 break 38749 } 38750 z := v.Args[0] 38751 if z.Op != OpARM64AND { 38752 break 38753 } 38754 _ = z.Args[1] 38755 x := z.Args[0] 38756 y := z.Args[1] 38757 if !(z.Uses == 1) { 38758 break 38759 } 38760 b.Kind = BlockARM64EQ 38761 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 38762 v0.AddArg(x) 38763 v0.AddArg(y) 38764 b.SetControl(v0) 38765 b.Aux = nil 38766 return true 38767 } 38768 // match: (EQ (CMPWconst [0] z:(AND x y)) yes no) 38769 // cond: z.Uses == 1 38770 // result: (EQ (TSTW x y) yes no) 38771 for { 38772 v := b.Control 38773 if v.Op != OpARM64CMPWconst { 38774 break 38775 } 38776 if v.AuxInt != 0 { 38777 break 38778 } 38779 z := v.Args[0] 38780 if z.Op != OpARM64AND { 38781 break 38782 } 38783 _ = z.Args[1] 38784 x := z.Args[0] 38785 y := z.Args[1] 38786 if !(z.Uses == 1) { 38787 break 38788 } 38789 b.Kind = BlockARM64EQ 38790 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 38791 v0.AddArg(x) 38792 v0.AddArg(y) 38793 b.SetControl(v0) 38794 b.Aux = nil 38795 return true 38796 } 38797 // match: (EQ (CMPconst [0] x:(ANDconst [c] y)) yes no) 38798 // cond: x.Uses == 1 38799 // result: (EQ (TSTconst [c] y) yes no) 38800 for { 38801 v := b.Control 38802 if v.Op != OpARM64CMPconst { 38803 break 38804 } 38805 if v.AuxInt != 0 { 38806 break 38807 } 38808 x := v.Args[0] 38809 if x.Op != OpARM64ANDconst { 38810 break 38811 } 38812 c := x.AuxInt 38813 y := x.Args[0] 38814 if !(x.Uses == 1) { 38815 break 38816 } 38817 b.Kind = BlockARM64EQ 38818 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 38819 v0.AuxInt = c 38820 v0.AddArg(y) 38821 b.SetControl(v0) 38822 b.Aux = 
nil 38823 return true 38824 } 38825 // match: (EQ (CMPconst [0] x:(ADDconst [c] y)) yes no) 38826 // cond: x.Uses == 1 38827 // result: (EQ (CMNconst [c] y) yes no) 38828 for { 38829 v := b.Control 38830 if v.Op != OpARM64CMPconst { 38831 break 38832 } 38833 if v.AuxInt != 0 { 38834 break 38835 } 38836 x := v.Args[0] 38837 if x.Op != OpARM64ADDconst { 38838 break 38839 } 38840 c := x.AuxInt 38841 y := x.Args[0] 38842 if !(x.Uses == 1) { 38843 break 38844 } 38845 b.Kind = BlockARM64EQ 38846 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 38847 v0.AuxInt = c 38848 v0.AddArg(y) 38849 b.SetControl(v0) 38850 b.Aux = nil 38851 return true 38852 } 38853 // match: (EQ (CMPWconst [0] x:(ADDconst [c] y)) yes no) 38854 // cond: x.Uses == 1 38855 // result: (EQ (CMNWconst [c] y) yes no) 38856 for { 38857 v := b.Control 38858 if v.Op != OpARM64CMPWconst { 38859 break 38860 } 38861 if v.AuxInt != 0 { 38862 break 38863 } 38864 x := v.Args[0] 38865 if x.Op != OpARM64ADDconst { 38866 break 38867 } 38868 c := x.AuxInt 38869 y := x.Args[0] 38870 if !(x.Uses == 1) { 38871 break 38872 } 38873 b.Kind = BlockARM64EQ 38874 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 38875 v0.AuxInt = c 38876 v0.AddArg(y) 38877 b.SetControl(v0) 38878 b.Aux = nil 38879 return true 38880 } 38881 // match: (EQ (CMPconst [0] z:(ADD x y)) yes no) 38882 // cond: z.Uses == 1 38883 // result: (EQ (CMN x y) yes no) 38884 for { 38885 v := b.Control 38886 if v.Op != OpARM64CMPconst { 38887 break 38888 } 38889 if v.AuxInt != 0 { 38890 break 38891 } 38892 z := v.Args[0] 38893 if z.Op != OpARM64ADD { 38894 break 38895 } 38896 _ = z.Args[1] 38897 x := z.Args[0] 38898 y := z.Args[1] 38899 if !(z.Uses == 1) { 38900 break 38901 } 38902 b.Kind = BlockARM64EQ 38903 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 38904 v0.AddArg(x) 38905 v0.AddArg(y) 38906 b.SetControl(v0) 38907 b.Aux = nil 38908 return true 38909 } 38910 // match: (EQ (CMPWconst [0] z:(ADD x y)) yes no) 38911 // cond: z.Uses 
== 1 38912 // result: (EQ (CMNW x y) yes no) 38913 for { 38914 v := b.Control 38915 if v.Op != OpARM64CMPWconst { 38916 break 38917 } 38918 if v.AuxInt != 0 { 38919 break 38920 } 38921 z := v.Args[0] 38922 if z.Op != OpARM64ADD { 38923 break 38924 } 38925 _ = z.Args[1] 38926 x := z.Args[0] 38927 y := z.Args[1] 38928 if !(z.Uses == 1) { 38929 break 38930 } 38931 b.Kind = BlockARM64EQ 38932 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 38933 v0.AddArg(x) 38934 v0.AddArg(y) 38935 b.SetControl(v0) 38936 b.Aux = nil 38937 return true 38938 } 38939 // match: (EQ (CMP x z:(NEG y)) yes no) 38940 // cond: z.Uses == 1 38941 // result: (EQ (CMN x y) yes no) 38942 for { 38943 v := b.Control 38944 if v.Op != OpARM64CMP { 38945 break 38946 } 38947 _ = v.Args[1] 38948 x := v.Args[0] 38949 z := v.Args[1] 38950 if z.Op != OpARM64NEG { 38951 break 38952 } 38953 y := z.Args[0] 38954 if !(z.Uses == 1) { 38955 break 38956 } 38957 b.Kind = BlockARM64EQ 38958 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 38959 v0.AddArg(x) 38960 v0.AddArg(y) 38961 b.SetControl(v0) 38962 b.Aux = nil 38963 return true 38964 } 38965 // match: (EQ (CMPW x z:(NEG y)) yes no) 38966 // cond: z.Uses == 1 38967 // result: (EQ (CMNW x y) yes no) 38968 for { 38969 v := b.Control 38970 if v.Op != OpARM64CMPW { 38971 break 38972 } 38973 _ = v.Args[1] 38974 x := v.Args[0] 38975 z := v.Args[1] 38976 if z.Op != OpARM64NEG { 38977 break 38978 } 38979 y := z.Args[0] 38980 if !(z.Uses == 1) { 38981 break 38982 } 38983 b.Kind = BlockARM64EQ 38984 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 38985 v0.AddArg(x) 38986 v0.AddArg(y) 38987 b.SetControl(v0) 38988 b.Aux = nil 38989 return true 38990 } 38991 // match: (EQ (CMPconst [0] x) yes no) 38992 // cond: 38993 // result: (Z x yes no) 38994 for { 38995 v := b.Control 38996 if v.Op != OpARM64CMPconst { 38997 break 38998 } 38999 if v.AuxInt != 0 { 39000 break 39001 } 39002 x := v.Args[0] 39003 b.Kind = BlockARM64Z 39004 b.SetControl(x) 39005 b.Aux = 
nil 39006 return true 39007 } 39008 // match: (EQ (CMPWconst [0] x) yes no) 39009 // cond: 39010 // result: (ZW x yes no) 39011 for { 39012 v := b.Control 39013 if v.Op != OpARM64CMPWconst { 39014 break 39015 } 39016 if v.AuxInt != 0 { 39017 break 39018 } 39019 x := v.Args[0] 39020 b.Kind = BlockARM64ZW 39021 b.SetControl(x) 39022 b.Aux = nil 39023 return true 39024 } 39025 // match: (EQ (CMPconst [0] z:(MADD a x y)) yes no) 39026 // cond: z.Uses==1 39027 // result: (EQ (CMN a (MUL <x.Type> x y)) yes no) 39028 for { 39029 v := b.Control 39030 if v.Op != OpARM64CMPconst { 39031 break 39032 } 39033 if v.AuxInt != 0 { 39034 break 39035 } 39036 z := v.Args[0] 39037 if z.Op != OpARM64MADD { 39038 break 39039 } 39040 _ = z.Args[2] 39041 a := z.Args[0] 39042 x := z.Args[1] 39043 y := z.Args[2] 39044 if !(z.Uses == 1) { 39045 break 39046 } 39047 b.Kind = BlockARM64EQ 39048 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39049 v0.AddArg(a) 39050 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39051 v1.AddArg(x) 39052 v1.AddArg(y) 39053 v0.AddArg(v1) 39054 b.SetControl(v0) 39055 b.Aux = nil 39056 return true 39057 } 39058 // match: (EQ (CMPconst [0] z:(MSUB a x y)) yes no) 39059 // cond: z.Uses==1 39060 // result: (EQ (CMP a (MUL <x.Type> x y)) yes no) 39061 for { 39062 v := b.Control 39063 if v.Op != OpARM64CMPconst { 39064 break 39065 } 39066 if v.AuxInt != 0 { 39067 break 39068 } 39069 z := v.Args[0] 39070 if z.Op != OpARM64MSUB { 39071 break 39072 } 39073 _ = z.Args[2] 39074 a := z.Args[0] 39075 x := z.Args[1] 39076 y := z.Args[2] 39077 if !(z.Uses == 1) { 39078 break 39079 } 39080 b.Kind = BlockARM64EQ 39081 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 39082 v0.AddArg(a) 39083 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39084 v1.AddArg(x) 39085 v1.AddArg(y) 39086 v0.AddArg(v1) 39087 b.SetControl(v0) 39088 b.Aux = nil 39089 return true 39090 } 39091 // match: (EQ (CMPWconst [0] z:(MADDW a x y)) yes no) 39092 // cond: z.Uses==1 39093 // result: (EQ (CMNW a 
(MULW <x.Type> x y)) yes no) 39094 for { 39095 v := b.Control 39096 if v.Op != OpARM64CMPWconst { 39097 break 39098 } 39099 if v.AuxInt != 0 { 39100 break 39101 } 39102 z := v.Args[0] 39103 if z.Op != OpARM64MADDW { 39104 break 39105 } 39106 _ = z.Args[2] 39107 a := z.Args[0] 39108 x := z.Args[1] 39109 y := z.Args[2] 39110 if !(z.Uses == 1) { 39111 break 39112 } 39113 b.Kind = BlockARM64EQ 39114 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39115 v0.AddArg(a) 39116 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39117 v1.AddArg(x) 39118 v1.AddArg(y) 39119 v0.AddArg(v1) 39120 b.SetControl(v0) 39121 b.Aux = nil 39122 return true 39123 } 39124 // match: (EQ (CMPWconst [0] z:(MSUBW a x y)) yes no) 39125 // cond: z.Uses==1 39126 // result: (EQ (CMPW a (MULW <x.Type> x y)) yes no) 39127 for { 39128 v := b.Control 39129 if v.Op != OpARM64CMPWconst { 39130 break 39131 } 39132 if v.AuxInt != 0 { 39133 break 39134 } 39135 z := v.Args[0] 39136 if z.Op != OpARM64MSUBW { 39137 break 39138 } 39139 _ = z.Args[2] 39140 a := z.Args[0] 39141 x := z.Args[1] 39142 y := z.Args[2] 39143 if !(z.Uses == 1) { 39144 break 39145 } 39146 b.Kind = BlockARM64EQ 39147 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 39148 v0.AddArg(a) 39149 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39150 v1.AddArg(x) 39151 v1.AddArg(y) 39152 v0.AddArg(v1) 39153 b.SetControl(v0) 39154 b.Aux = nil 39155 return true 39156 } 39157 // match: (EQ (TSTconst [c] x) yes no) 39158 // cond: oneBit(c) 39159 // result: (TBZ {ntz(c)} x yes no) 39160 for { 39161 v := b.Control 39162 if v.Op != OpARM64TSTconst { 39163 break 39164 } 39165 c := v.AuxInt 39166 x := v.Args[0] 39167 if !(oneBit(c)) { 39168 break 39169 } 39170 b.Kind = BlockARM64TBZ 39171 b.SetControl(x) 39172 b.Aux = ntz(c) 39173 return true 39174 } 39175 // match: (EQ (TSTWconst [c] x) yes no) 39176 // cond: oneBit(int64(uint32(c))) 39177 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 39178 for { 39179 v := b.Control 39180 if v.Op != 
OpARM64TSTWconst { 39181 break 39182 } 39183 c := v.AuxInt 39184 x := v.Args[0] 39185 if !(oneBit(int64(uint32(c)))) { 39186 break 39187 } 39188 b.Kind = BlockARM64TBZ 39189 b.SetControl(x) 39190 b.Aux = ntz(int64(uint32(c))) 39191 return true 39192 } 39193 // match: (EQ (FlagEQ) yes no) 39194 // cond: 39195 // result: (First nil yes no) 39196 for { 39197 v := b.Control 39198 if v.Op != OpARM64FlagEQ { 39199 break 39200 } 39201 b.Kind = BlockFirst 39202 b.SetControl(nil) 39203 b.Aux = nil 39204 return true 39205 } 39206 // match: (EQ (FlagLT_ULT) yes no) 39207 // cond: 39208 // result: (First nil no yes) 39209 for { 39210 v := b.Control 39211 if v.Op != OpARM64FlagLT_ULT { 39212 break 39213 } 39214 b.Kind = BlockFirst 39215 b.SetControl(nil) 39216 b.Aux = nil 39217 b.swapSuccessors() 39218 return true 39219 } 39220 // match: (EQ (FlagLT_UGT) yes no) 39221 // cond: 39222 // result: (First nil no yes) 39223 for { 39224 v := b.Control 39225 if v.Op != OpARM64FlagLT_UGT { 39226 break 39227 } 39228 b.Kind = BlockFirst 39229 b.SetControl(nil) 39230 b.Aux = nil 39231 b.swapSuccessors() 39232 return true 39233 } 39234 // match: (EQ (FlagGT_ULT) yes no) 39235 // cond: 39236 // result: (First nil no yes) 39237 for { 39238 v := b.Control 39239 if v.Op != OpARM64FlagGT_ULT { 39240 break 39241 } 39242 b.Kind = BlockFirst 39243 b.SetControl(nil) 39244 b.Aux = nil 39245 b.swapSuccessors() 39246 return true 39247 } 39248 // match: (EQ (FlagGT_UGT) yes no) 39249 // cond: 39250 // result: (First nil no yes) 39251 for { 39252 v := b.Control 39253 if v.Op != OpARM64FlagGT_UGT { 39254 break 39255 } 39256 b.Kind = BlockFirst 39257 b.SetControl(nil) 39258 b.Aux = nil 39259 b.swapSuccessors() 39260 return true 39261 } 39262 // match: (EQ (InvertFlags cmp) yes no) 39263 // cond: 39264 // result: (EQ cmp yes no) 39265 for { 39266 v := b.Control 39267 if v.Op != OpARM64InvertFlags { 39268 break 39269 } 39270 cmp := v.Args[0] 39271 b.Kind = BlockARM64EQ 39272 b.SetControl(cmp) 39273 b.Aux = 
nil 39274 return true 39275 } 39276 case BlockARM64GE: 39277 // match: (GE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 39278 // cond: x.Uses == 1 39279 // result: (GE (TSTWconst [c] y) yes no) 39280 for { 39281 v := b.Control 39282 if v.Op != OpARM64CMPWconst { 39283 break 39284 } 39285 if v.AuxInt != 0 { 39286 break 39287 } 39288 x := v.Args[0] 39289 if x.Op != OpARM64ANDconst { 39290 break 39291 } 39292 c := x.AuxInt 39293 y := x.Args[0] 39294 if !(x.Uses == 1) { 39295 break 39296 } 39297 b.Kind = BlockARM64GE 39298 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 39299 v0.AuxInt = c 39300 v0.AddArg(y) 39301 b.SetControl(v0) 39302 b.Aux = nil 39303 return true 39304 } 39305 // match: (GE (CMPconst [0] z:(AND x y)) yes no) 39306 // cond: z.Uses == 1 39307 // result: (GE (TST x y) yes no) 39308 for { 39309 v := b.Control 39310 if v.Op != OpARM64CMPconst { 39311 break 39312 } 39313 if v.AuxInt != 0 { 39314 break 39315 } 39316 z := v.Args[0] 39317 if z.Op != OpARM64AND { 39318 break 39319 } 39320 _ = z.Args[1] 39321 x := z.Args[0] 39322 y := z.Args[1] 39323 if !(z.Uses == 1) { 39324 break 39325 } 39326 b.Kind = BlockARM64GE 39327 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 39328 v0.AddArg(x) 39329 v0.AddArg(y) 39330 b.SetControl(v0) 39331 b.Aux = nil 39332 return true 39333 } 39334 // match: (GE (CMPWconst [0] z:(AND x y)) yes no) 39335 // cond: z.Uses == 1 39336 // result: (GE (TSTW x y) yes no) 39337 for { 39338 v := b.Control 39339 if v.Op != OpARM64CMPWconst { 39340 break 39341 } 39342 if v.AuxInt != 0 { 39343 break 39344 } 39345 z := v.Args[0] 39346 if z.Op != OpARM64AND { 39347 break 39348 } 39349 _ = z.Args[1] 39350 x := z.Args[0] 39351 y := z.Args[1] 39352 if !(z.Uses == 1) { 39353 break 39354 } 39355 b.Kind = BlockARM64GE 39356 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 39357 v0.AddArg(x) 39358 v0.AddArg(y) 39359 b.SetControl(v0) 39360 b.Aux = nil 39361 return true 39362 } 39363 // match: (GE (CMPconst [0] x:(ANDconst [c] y)) 
yes no) 39364 // cond: x.Uses == 1 39365 // result: (GE (TSTconst [c] y) yes no) 39366 for { 39367 v := b.Control 39368 if v.Op != OpARM64CMPconst { 39369 break 39370 } 39371 if v.AuxInt != 0 { 39372 break 39373 } 39374 x := v.Args[0] 39375 if x.Op != OpARM64ANDconst { 39376 break 39377 } 39378 c := x.AuxInt 39379 y := x.Args[0] 39380 if !(x.Uses == 1) { 39381 break 39382 } 39383 b.Kind = BlockARM64GE 39384 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 39385 v0.AuxInt = c 39386 v0.AddArg(y) 39387 b.SetControl(v0) 39388 b.Aux = nil 39389 return true 39390 } 39391 // match: (GE (CMPconst [0] x:(ADDconst [c] y)) yes no) 39392 // cond: x.Uses == 1 39393 // result: (GE (CMNconst [c] y) yes no) 39394 for { 39395 v := b.Control 39396 if v.Op != OpARM64CMPconst { 39397 break 39398 } 39399 if v.AuxInt != 0 { 39400 break 39401 } 39402 x := v.Args[0] 39403 if x.Op != OpARM64ADDconst { 39404 break 39405 } 39406 c := x.AuxInt 39407 y := x.Args[0] 39408 if !(x.Uses == 1) { 39409 break 39410 } 39411 b.Kind = BlockARM64GE 39412 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 39413 v0.AuxInt = c 39414 v0.AddArg(y) 39415 b.SetControl(v0) 39416 b.Aux = nil 39417 return true 39418 } 39419 // match: (GE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 39420 // cond: x.Uses == 1 39421 // result: (GE (CMNWconst [c] y) yes no) 39422 for { 39423 v := b.Control 39424 if v.Op != OpARM64CMPWconst { 39425 break 39426 } 39427 if v.AuxInt != 0 { 39428 break 39429 } 39430 x := v.Args[0] 39431 if x.Op != OpARM64ADDconst { 39432 break 39433 } 39434 c := x.AuxInt 39435 y := x.Args[0] 39436 if !(x.Uses == 1) { 39437 break 39438 } 39439 b.Kind = BlockARM64GE 39440 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 39441 v0.AuxInt = c 39442 v0.AddArg(y) 39443 b.SetControl(v0) 39444 b.Aux = nil 39445 return true 39446 } 39447 // match: (GE (CMPconst [0] z:(ADD x y)) yes no) 39448 // cond: z.Uses == 1 39449 // result: (GE (CMN x y) yes no) 39450 for { 39451 v := b.Control 39452 
if v.Op != OpARM64CMPconst { 39453 break 39454 } 39455 if v.AuxInt != 0 { 39456 break 39457 } 39458 z := v.Args[0] 39459 if z.Op != OpARM64ADD { 39460 break 39461 } 39462 _ = z.Args[1] 39463 x := z.Args[0] 39464 y := z.Args[1] 39465 if !(z.Uses == 1) { 39466 break 39467 } 39468 b.Kind = BlockARM64GE 39469 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39470 v0.AddArg(x) 39471 v0.AddArg(y) 39472 b.SetControl(v0) 39473 b.Aux = nil 39474 return true 39475 } 39476 // match: (GE (CMPWconst [0] z:(ADD x y)) yes no) 39477 // cond: z.Uses == 1 39478 // result: (GE (CMNW x y) yes no) 39479 for { 39480 v := b.Control 39481 if v.Op != OpARM64CMPWconst { 39482 break 39483 } 39484 if v.AuxInt != 0 { 39485 break 39486 } 39487 z := v.Args[0] 39488 if z.Op != OpARM64ADD { 39489 break 39490 } 39491 _ = z.Args[1] 39492 x := z.Args[0] 39493 y := z.Args[1] 39494 if !(z.Uses == 1) { 39495 break 39496 } 39497 b.Kind = BlockARM64GE 39498 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39499 v0.AddArg(x) 39500 v0.AddArg(y) 39501 b.SetControl(v0) 39502 b.Aux = nil 39503 return true 39504 } 39505 // match: (GE (CMP x z:(NEG y)) yes no) 39506 // cond: z.Uses == 1 39507 // result: (GE (CMN x y) yes no) 39508 for { 39509 v := b.Control 39510 if v.Op != OpARM64CMP { 39511 break 39512 } 39513 _ = v.Args[1] 39514 x := v.Args[0] 39515 z := v.Args[1] 39516 if z.Op != OpARM64NEG { 39517 break 39518 } 39519 y := z.Args[0] 39520 if !(z.Uses == 1) { 39521 break 39522 } 39523 b.Kind = BlockARM64GE 39524 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39525 v0.AddArg(x) 39526 v0.AddArg(y) 39527 b.SetControl(v0) 39528 b.Aux = nil 39529 return true 39530 } 39531 // match: (GE (CMPW x z:(NEG y)) yes no) 39532 // cond: z.Uses == 1 39533 // result: (GE (CMNW x y) yes no) 39534 for { 39535 v := b.Control 39536 if v.Op != OpARM64CMPW { 39537 break 39538 } 39539 _ = v.Args[1] 39540 x := v.Args[0] 39541 z := v.Args[1] 39542 if z.Op != OpARM64NEG { 39543 break 39544 } 39545 y := z.Args[0] 39546 
if !(z.Uses == 1) { 39547 break 39548 } 39549 b.Kind = BlockARM64GE 39550 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39551 v0.AddArg(x) 39552 v0.AddArg(y) 39553 b.SetControl(v0) 39554 b.Aux = nil 39555 return true 39556 } 39557 // match: (GE (CMPconst [0] z:(MADD a x y)) yes no) 39558 // cond: z.Uses==1 39559 // result: (GE (CMN a (MUL <x.Type> x y)) yes no) 39560 for { 39561 v := b.Control 39562 if v.Op != OpARM64CMPconst { 39563 break 39564 } 39565 if v.AuxInt != 0 { 39566 break 39567 } 39568 z := v.Args[0] 39569 if z.Op != OpARM64MADD { 39570 break 39571 } 39572 _ = z.Args[2] 39573 a := z.Args[0] 39574 x := z.Args[1] 39575 y := z.Args[2] 39576 if !(z.Uses == 1) { 39577 break 39578 } 39579 b.Kind = BlockARM64GE 39580 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39581 v0.AddArg(a) 39582 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39583 v1.AddArg(x) 39584 v1.AddArg(y) 39585 v0.AddArg(v1) 39586 b.SetControl(v0) 39587 b.Aux = nil 39588 return true 39589 } 39590 // match: (GE (CMPconst [0] z:(MSUB a x y)) yes no) 39591 // cond: z.Uses==1 39592 // result: (GE (CMP a (MUL <x.Type> x y)) yes no) 39593 for { 39594 v := b.Control 39595 if v.Op != OpARM64CMPconst { 39596 break 39597 } 39598 if v.AuxInt != 0 { 39599 break 39600 } 39601 z := v.Args[0] 39602 if z.Op != OpARM64MSUB { 39603 break 39604 } 39605 _ = z.Args[2] 39606 a := z.Args[0] 39607 x := z.Args[1] 39608 y := z.Args[2] 39609 if !(z.Uses == 1) { 39610 break 39611 } 39612 b.Kind = BlockARM64GE 39613 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 39614 v0.AddArg(a) 39615 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 39616 v1.AddArg(x) 39617 v1.AddArg(y) 39618 v0.AddArg(v1) 39619 b.SetControl(v0) 39620 b.Aux = nil 39621 return true 39622 } 39623 // match: (GE (CMPWconst [0] z:(MADDW a x y)) yes no) 39624 // cond: z.Uses==1 39625 // result: (GE (CMNW a (MULW <x.Type> x y)) yes no) 39626 for { 39627 v := b.Control 39628 if v.Op != OpARM64CMPWconst { 39629 break 39630 } 39631 if v.AuxInt != 
0 { 39632 break 39633 } 39634 z := v.Args[0] 39635 if z.Op != OpARM64MADDW { 39636 break 39637 } 39638 _ = z.Args[2] 39639 a := z.Args[0] 39640 x := z.Args[1] 39641 y := z.Args[2] 39642 if !(z.Uses == 1) { 39643 break 39644 } 39645 b.Kind = BlockARM64GE 39646 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 39647 v0.AddArg(a) 39648 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39649 v1.AddArg(x) 39650 v1.AddArg(y) 39651 v0.AddArg(v1) 39652 b.SetControl(v0) 39653 b.Aux = nil 39654 return true 39655 } 39656 // match: (GE (CMPWconst [0] z:(MSUBW a x y)) yes no) 39657 // cond: z.Uses==1 39658 // result: (GE (CMPW a (MULW <x.Type> x y)) yes no) 39659 for { 39660 v := b.Control 39661 if v.Op != OpARM64CMPWconst { 39662 break 39663 } 39664 if v.AuxInt != 0 { 39665 break 39666 } 39667 z := v.Args[0] 39668 if z.Op != OpARM64MSUBW { 39669 break 39670 } 39671 _ = z.Args[2] 39672 a := z.Args[0] 39673 x := z.Args[1] 39674 y := z.Args[2] 39675 if !(z.Uses == 1) { 39676 break 39677 } 39678 b.Kind = BlockARM64GE 39679 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 39680 v0.AddArg(a) 39681 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 39682 v1.AddArg(x) 39683 v1.AddArg(y) 39684 v0.AddArg(v1) 39685 b.SetControl(v0) 39686 b.Aux = nil 39687 return true 39688 } 39689 // match: (GE (CMPWconst [0] x) yes no) 39690 // cond: 39691 // result: (TBZ {int64(31)} x yes no) 39692 for { 39693 v := b.Control 39694 if v.Op != OpARM64CMPWconst { 39695 break 39696 } 39697 if v.AuxInt != 0 { 39698 break 39699 } 39700 x := v.Args[0] 39701 b.Kind = BlockARM64TBZ 39702 b.SetControl(x) 39703 b.Aux = int64(31) 39704 return true 39705 } 39706 // match: (GE (CMPconst [0] x) yes no) 39707 // cond: 39708 // result: (TBZ {int64(63)} x yes no) 39709 for { 39710 v := b.Control 39711 if v.Op != OpARM64CMPconst { 39712 break 39713 } 39714 if v.AuxInt != 0 { 39715 break 39716 } 39717 x := v.Args[0] 39718 b.Kind = BlockARM64TBZ 39719 b.SetControl(x) 39720 b.Aux = int64(63) 39721 return true 39722 } 
39723 // match: (GE (FlagEQ) yes no) 39724 // cond: 39725 // result: (First nil yes no) 39726 for { 39727 v := b.Control 39728 if v.Op != OpARM64FlagEQ { 39729 break 39730 } 39731 b.Kind = BlockFirst 39732 b.SetControl(nil) 39733 b.Aux = nil 39734 return true 39735 } 39736 // match: (GE (FlagLT_ULT) yes no) 39737 // cond: 39738 // result: (First nil no yes) 39739 for { 39740 v := b.Control 39741 if v.Op != OpARM64FlagLT_ULT { 39742 break 39743 } 39744 b.Kind = BlockFirst 39745 b.SetControl(nil) 39746 b.Aux = nil 39747 b.swapSuccessors() 39748 return true 39749 } 39750 // match: (GE (FlagLT_UGT) yes no) 39751 // cond: 39752 // result: (First nil no yes) 39753 for { 39754 v := b.Control 39755 if v.Op != OpARM64FlagLT_UGT { 39756 break 39757 } 39758 b.Kind = BlockFirst 39759 b.SetControl(nil) 39760 b.Aux = nil 39761 b.swapSuccessors() 39762 return true 39763 } 39764 // match: (GE (FlagGT_ULT) yes no) 39765 // cond: 39766 // result: (First nil yes no) 39767 for { 39768 v := b.Control 39769 if v.Op != OpARM64FlagGT_ULT { 39770 break 39771 } 39772 b.Kind = BlockFirst 39773 b.SetControl(nil) 39774 b.Aux = nil 39775 return true 39776 } 39777 // match: (GE (FlagGT_UGT) yes no) 39778 // cond: 39779 // result: (First nil yes no) 39780 for { 39781 v := b.Control 39782 if v.Op != OpARM64FlagGT_UGT { 39783 break 39784 } 39785 b.Kind = BlockFirst 39786 b.SetControl(nil) 39787 b.Aux = nil 39788 return true 39789 } 39790 // match: (GE (InvertFlags cmp) yes no) 39791 // cond: 39792 // result: (LE cmp yes no) 39793 for { 39794 v := b.Control 39795 if v.Op != OpARM64InvertFlags { 39796 break 39797 } 39798 cmp := v.Args[0] 39799 b.Kind = BlockARM64LE 39800 b.SetControl(cmp) 39801 b.Aux = nil 39802 return true 39803 } 39804 case BlockARM64GT: 39805 // match: (GT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 39806 // cond: x.Uses == 1 39807 // result: (GT (TSTWconst [c] y) yes no) 39808 for { 39809 v := b.Control 39810 if v.Op != OpARM64CMPWconst { 39811 break 39812 } 39813 if v.AuxInt != 0 
{ 39814 break 39815 } 39816 x := v.Args[0] 39817 if x.Op != OpARM64ANDconst { 39818 break 39819 } 39820 c := x.AuxInt 39821 y := x.Args[0] 39822 if !(x.Uses == 1) { 39823 break 39824 } 39825 b.Kind = BlockARM64GT 39826 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 39827 v0.AuxInt = c 39828 v0.AddArg(y) 39829 b.SetControl(v0) 39830 b.Aux = nil 39831 return true 39832 } 39833 // match: (GT (CMPconst [0] z:(AND x y)) yes no) 39834 // cond: z.Uses == 1 39835 // result: (GT (TST x y) yes no) 39836 for { 39837 v := b.Control 39838 if v.Op != OpARM64CMPconst { 39839 break 39840 } 39841 if v.AuxInt != 0 { 39842 break 39843 } 39844 z := v.Args[0] 39845 if z.Op != OpARM64AND { 39846 break 39847 } 39848 _ = z.Args[1] 39849 x := z.Args[0] 39850 y := z.Args[1] 39851 if !(z.Uses == 1) { 39852 break 39853 } 39854 b.Kind = BlockARM64GT 39855 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 39856 v0.AddArg(x) 39857 v0.AddArg(y) 39858 b.SetControl(v0) 39859 b.Aux = nil 39860 return true 39861 } 39862 // match: (GT (CMPWconst [0] z:(AND x y)) yes no) 39863 // cond: z.Uses == 1 39864 // result: (GT (TSTW x y) yes no) 39865 for { 39866 v := b.Control 39867 if v.Op != OpARM64CMPWconst { 39868 break 39869 } 39870 if v.AuxInt != 0 { 39871 break 39872 } 39873 z := v.Args[0] 39874 if z.Op != OpARM64AND { 39875 break 39876 } 39877 _ = z.Args[1] 39878 x := z.Args[0] 39879 y := z.Args[1] 39880 if !(z.Uses == 1) { 39881 break 39882 } 39883 b.Kind = BlockARM64GT 39884 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 39885 v0.AddArg(x) 39886 v0.AddArg(y) 39887 b.SetControl(v0) 39888 b.Aux = nil 39889 return true 39890 } 39891 // match: (GT (CMPconst [0] x:(ANDconst [c] y)) yes no) 39892 // cond: x.Uses == 1 39893 // result: (GT (TSTconst [c] y) yes no) 39894 for { 39895 v := b.Control 39896 if v.Op != OpARM64CMPconst { 39897 break 39898 } 39899 if v.AuxInt != 0 { 39900 break 39901 } 39902 x := v.Args[0] 39903 if x.Op != OpARM64ANDconst { 39904 break 39905 } 39906 c := 
x.AuxInt 39907 y := x.Args[0] 39908 if !(x.Uses == 1) { 39909 break 39910 } 39911 b.Kind = BlockARM64GT 39912 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 39913 v0.AuxInt = c 39914 v0.AddArg(y) 39915 b.SetControl(v0) 39916 b.Aux = nil 39917 return true 39918 } 39919 // match: (GT (CMPconst [0] x:(ADDconst [c] y)) yes no) 39920 // cond: x.Uses == 1 39921 // result: (GT (CMNconst [c] y) yes no) 39922 for { 39923 v := b.Control 39924 if v.Op != OpARM64CMPconst { 39925 break 39926 } 39927 if v.AuxInt != 0 { 39928 break 39929 } 39930 x := v.Args[0] 39931 if x.Op != OpARM64ADDconst { 39932 break 39933 } 39934 c := x.AuxInt 39935 y := x.Args[0] 39936 if !(x.Uses == 1) { 39937 break 39938 } 39939 b.Kind = BlockARM64GT 39940 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 39941 v0.AuxInt = c 39942 v0.AddArg(y) 39943 b.SetControl(v0) 39944 b.Aux = nil 39945 return true 39946 } 39947 // match: (GT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 39948 // cond: x.Uses == 1 39949 // result: (GT (CMNWconst [c] y) yes no) 39950 for { 39951 v := b.Control 39952 if v.Op != OpARM64CMPWconst { 39953 break 39954 } 39955 if v.AuxInt != 0 { 39956 break 39957 } 39958 x := v.Args[0] 39959 if x.Op != OpARM64ADDconst { 39960 break 39961 } 39962 c := x.AuxInt 39963 y := x.Args[0] 39964 if !(x.Uses == 1) { 39965 break 39966 } 39967 b.Kind = BlockARM64GT 39968 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 39969 v0.AuxInt = c 39970 v0.AddArg(y) 39971 b.SetControl(v0) 39972 b.Aux = nil 39973 return true 39974 } 39975 // match: (GT (CMPconst [0] z:(ADD x y)) yes no) 39976 // cond: z.Uses == 1 39977 // result: (GT (CMN x y) yes no) 39978 for { 39979 v := b.Control 39980 if v.Op != OpARM64CMPconst { 39981 break 39982 } 39983 if v.AuxInt != 0 { 39984 break 39985 } 39986 z := v.Args[0] 39987 if z.Op != OpARM64ADD { 39988 break 39989 } 39990 _ = z.Args[1] 39991 x := z.Args[0] 39992 y := z.Args[1] 39993 if !(z.Uses == 1) { 39994 break 39995 } 39996 b.Kind = BlockARM64GT 
39997 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 39998 v0.AddArg(x) 39999 v0.AddArg(y) 40000 b.SetControl(v0) 40001 b.Aux = nil 40002 return true 40003 } 40004 // match: (GT (CMPWconst [0] z:(ADD x y)) yes no) 40005 // cond: z.Uses == 1 40006 // result: (GT (CMNW x y) yes no) 40007 for { 40008 v := b.Control 40009 if v.Op != OpARM64CMPWconst { 40010 break 40011 } 40012 if v.AuxInt != 0 { 40013 break 40014 } 40015 z := v.Args[0] 40016 if z.Op != OpARM64ADD { 40017 break 40018 } 40019 _ = z.Args[1] 40020 x := z.Args[0] 40021 y := z.Args[1] 40022 if !(z.Uses == 1) { 40023 break 40024 } 40025 b.Kind = BlockARM64GT 40026 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40027 v0.AddArg(x) 40028 v0.AddArg(y) 40029 b.SetControl(v0) 40030 b.Aux = nil 40031 return true 40032 } 40033 // match: (GT (CMP x z:(NEG y)) yes no) 40034 // cond: z.Uses == 1 40035 // result: (GT (CMN x y) yes no) 40036 for { 40037 v := b.Control 40038 if v.Op != OpARM64CMP { 40039 break 40040 } 40041 _ = v.Args[1] 40042 x := v.Args[0] 40043 z := v.Args[1] 40044 if z.Op != OpARM64NEG { 40045 break 40046 } 40047 y := z.Args[0] 40048 if !(z.Uses == 1) { 40049 break 40050 } 40051 b.Kind = BlockARM64GT 40052 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40053 v0.AddArg(x) 40054 v0.AddArg(y) 40055 b.SetControl(v0) 40056 b.Aux = nil 40057 return true 40058 } 40059 // match: (GT (CMPW x z:(NEG y)) yes no) 40060 // cond: z.Uses == 1 40061 // result: (GT (CMNW x y) yes no) 40062 for { 40063 v := b.Control 40064 if v.Op != OpARM64CMPW { 40065 break 40066 } 40067 _ = v.Args[1] 40068 x := v.Args[0] 40069 z := v.Args[1] 40070 if z.Op != OpARM64NEG { 40071 break 40072 } 40073 y := z.Args[0] 40074 if !(z.Uses == 1) { 40075 break 40076 } 40077 b.Kind = BlockARM64GT 40078 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40079 v0.AddArg(x) 40080 v0.AddArg(y) 40081 b.SetControl(v0) 40082 b.Aux = nil 40083 return true 40084 } 40085 // match: (GT (CMPconst [0] z:(MADD a x y)) yes no) 40086 // 
cond: z.Uses==1 40087 // result: (GT (CMN a (MUL <x.Type> x y)) yes no) 40088 for { 40089 v := b.Control 40090 if v.Op != OpARM64CMPconst { 40091 break 40092 } 40093 if v.AuxInt != 0 { 40094 break 40095 } 40096 z := v.Args[0] 40097 if z.Op != OpARM64MADD { 40098 break 40099 } 40100 _ = z.Args[2] 40101 a := z.Args[0] 40102 x := z.Args[1] 40103 y := z.Args[2] 40104 if !(z.Uses == 1) { 40105 break 40106 } 40107 b.Kind = BlockARM64GT 40108 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40109 v0.AddArg(a) 40110 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40111 v1.AddArg(x) 40112 v1.AddArg(y) 40113 v0.AddArg(v1) 40114 b.SetControl(v0) 40115 b.Aux = nil 40116 return true 40117 } 40118 // match: (GT (CMPconst [0] z:(MSUB a x y)) yes no) 40119 // cond: z.Uses==1 40120 // result: (GT (CMP a (MUL <x.Type> x y)) yes no) 40121 for { 40122 v := b.Control 40123 if v.Op != OpARM64CMPconst { 40124 break 40125 } 40126 if v.AuxInt != 0 { 40127 break 40128 } 40129 z := v.Args[0] 40130 if z.Op != OpARM64MSUB { 40131 break 40132 } 40133 _ = z.Args[2] 40134 a := z.Args[0] 40135 x := z.Args[1] 40136 y := z.Args[2] 40137 if !(z.Uses == 1) { 40138 break 40139 } 40140 b.Kind = BlockARM64GT 40141 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 40142 v0.AddArg(a) 40143 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40144 v1.AddArg(x) 40145 v1.AddArg(y) 40146 v0.AddArg(v1) 40147 b.SetControl(v0) 40148 b.Aux = nil 40149 return true 40150 } 40151 // match: (GT (CMPWconst [0] z:(MADDW a x y)) yes no) 40152 // cond: z.Uses==1 40153 // result: (GT (CMNW a (MULW <x.Type> x y)) yes no) 40154 for { 40155 v := b.Control 40156 if v.Op != OpARM64CMPWconst { 40157 break 40158 } 40159 if v.AuxInt != 0 { 40160 break 40161 } 40162 z := v.Args[0] 40163 if z.Op != OpARM64MADDW { 40164 break 40165 } 40166 _ = z.Args[2] 40167 a := z.Args[0] 40168 x := z.Args[1] 40169 y := z.Args[2] 40170 if !(z.Uses == 1) { 40171 break 40172 } 40173 b.Kind = BlockARM64GT 40174 v0 := b.NewValue0(v.Pos, OpARM64CMNW, 
types.TypeFlags) 40175 v0.AddArg(a) 40176 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40177 v1.AddArg(x) 40178 v1.AddArg(y) 40179 v0.AddArg(v1) 40180 b.SetControl(v0) 40181 b.Aux = nil 40182 return true 40183 } 40184 // match: (GT (CMPWconst [0] z:(MSUBW a x y)) yes no) 40185 // cond: z.Uses==1 40186 // result: (GT (CMPW a (MULW <x.Type> x y)) yes no) 40187 for { 40188 v := b.Control 40189 if v.Op != OpARM64CMPWconst { 40190 break 40191 } 40192 if v.AuxInt != 0 { 40193 break 40194 } 40195 z := v.Args[0] 40196 if z.Op != OpARM64MSUBW { 40197 break 40198 } 40199 _ = z.Args[2] 40200 a := z.Args[0] 40201 x := z.Args[1] 40202 y := z.Args[2] 40203 if !(z.Uses == 1) { 40204 break 40205 } 40206 b.Kind = BlockARM64GT 40207 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 40208 v0.AddArg(a) 40209 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40210 v1.AddArg(x) 40211 v1.AddArg(y) 40212 v0.AddArg(v1) 40213 b.SetControl(v0) 40214 b.Aux = nil 40215 return true 40216 } 40217 // match: (GT (FlagEQ) yes no) 40218 // cond: 40219 // result: (First nil no yes) 40220 for { 40221 v := b.Control 40222 if v.Op != OpARM64FlagEQ { 40223 break 40224 } 40225 b.Kind = BlockFirst 40226 b.SetControl(nil) 40227 b.Aux = nil 40228 b.swapSuccessors() 40229 return true 40230 } 40231 // match: (GT (FlagLT_ULT) yes no) 40232 // cond: 40233 // result: (First nil no yes) 40234 for { 40235 v := b.Control 40236 if v.Op != OpARM64FlagLT_ULT { 40237 break 40238 } 40239 b.Kind = BlockFirst 40240 b.SetControl(nil) 40241 b.Aux = nil 40242 b.swapSuccessors() 40243 return true 40244 } 40245 // match: (GT (FlagLT_UGT) yes no) 40246 // cond: 40247 // result: (First nil no yes) 40248 for { 40249 v := b.Control 40250 if v.Op != OpARM64FlagLT_UGT { 40251 break 40252 } 40253 b.Kind = BlockFirst 40254 b.SetControl(nil) 40255 b.Aux = nil 40256 b.swapSuccessors() 40257 return true 40258 } 40259 // match: (GT (FlagGT_ULT) yes no) 40260 // cond: 40261 // result: (First nil yes no) 40262 for { 40263 v := b.Control 
40264 if v.Op != OpARM64FlagGT_ULT { 40265 break 40266 } 40267 b.Kind = BlockFirst 40268 b.SetControl(nil) 40269 b.Aux = nil 40270 return true 40271 } 40272 // match: (GT (FlagGT_UGT) yes no) 40273 // cond: 40274 // result: (First nil yes no) 40275 for { 40276 v := b.Control 40277 if v.Op != OpARM64FlagGT_UGT { 40278 break 40279 } 40280 b.Kind = BlockFirst 40281 b.SetControl(nil) 40282 b.Aux = nil 40283 return true 40284 } 40285 // match: (GT (InvertFlags cmp) yes no) 40286 // cond: 40287 // result: (LT cmp yes no) 40288 for { 40289 v := b.Control 40290 if v.Op != OpARM64InvertFlags { 40291 break 40292 } 40293 cmp := v.Args[0] 40294 b.Kind = BlockARM64LT 40295 b.SetControl(cmp) 40296 b.Aux = nil 40297 return true 40298 } 40299 case BlockIf: 40300 // match: (If (Equal cc) yes no) 40301 // cond: 40302 // result: (EQ cc yes no) 40303 for { 40304 v := b.Control 40305 if v.Op != OpARM64Equal { 40306 break 40307 } 40308 cc := v.Args[0] 40309 b.Kind = BlockARM64EQ 40310 b.SetControl(cc) 40311 b.Aux = nil 40312 return true 40313 } 40314 // match: (If (NotEqual cc) yes no) 40315 // cond: 40316 // result: (NE cc yes no) 40317 for { 40318 v := b.Control 40319 if v.Op != OpARM64NotEqual { 40320 break 40321 } 40322 cc := v.Args[0] 40323 b.Kind = BlockARM64NE 40324 b.SetControl(cc) 40325 b.Aux = nil 40326 return true 40327 } 40328 // match: (If (LessThan cc) yes no) 40329 // cond: 40330 // result: (LT cc yes no) 40331 for { 40332 v := b.Control 40333 if v.Op != OpARM64LessThan { 40334 break 40335 } 40336 cc := v.Args[0] 40337 b.Kind = BlockARM64LT 40338 b.SetControl(cc) 40339 b.Aux = nil 40340 return true 40341 } 40342 // match: (If (LessThanU cc) yes no) 40343 // cond: 40344 // result: (ULT cc yes no) 40345 for { 40346 v := b.Control 40347 if v.Op != OpARM64LessThanU { 40348 break 40349 } 40350 cc := v.Args[0] 40351 b.Kind = BlockARM64ULT 40352 b.SetControl(cc) 40353 b.Aux = nil 40354 return true 40355 } 40356 // match: (If (LessEqual cc) yes no) 40357 // cond: 40358 // result: 
(LE cc yes no) 40359 for { 40360 v := b.Control 40361 if v.Op != OpARM64LessEqual { 40362 break 40363 } 40364 cc := v.Args[0] 40365 b.Kind = BlockARM64LE 40366 b.SetControl(cc) 40367 b.Aux = nil 40368 return true 40369 } 40370 // match: (If (LessEqualU cc) yes no) 40371 // cond: 40372 // result: (ULE cc yes no) 40373 for { 40374 v := b.Control 40375 if v.Op != OpARM64LessEqualU { 40376 break 40377 } 40378 cc := v.Args[0] 40379 b.Kind = BlockARM64ULE 40380 b.SetControl(cc) 40381 b.Aux = nil 40382 return true 40383 } 40384 // match: (If (GreaterThan cc) yes no) 40385 // cond: 40386 // result: (GT cc yes no) 40387 for { 40388 v := b.Control 40389 if v.Op != OpARM64GreaterThan { 40390 break 40391 } 40392 cc := v.Args[0] 40393 b.Kind = BlockARM64GT 40394 b.SetControl(cc) 40395 b.Aux = nil 40396 return true 40397 } 40398 // match: (If (GreaterThanU cc) yes no) 40399 // cond: 40400 // result: (UGT cc yes no) 40401 for { 40402 v := b.Control 40403 if v.Op != OpARM64GreaterThanU { 40404 break 40405 } 40406 cc := v.Args[0] 40407 b.Kind = BlockARM64UGT 40408 b.SetControl(cc) 40409 b.Aux = nil 40410 return true 40411 } 40412 // match: (If (GreaterEqual cc) yes no) 40413 // cond: 40414 // result: (GE cc yes no) 40415 for { 40416 v := b.Control 40417 if v.Op != OpARM64GreaterEqual { 40418 break 40419 } 40420 cc := v.Args[0] 40421 b.Kind = BlockARM64GE 40422 b.SetControl(cc) 40423 b.Aux = nil 40424 return true 40425 } 40426 // match: (If (GreaterEqualU cc) yes no) 40427 // cond: 40428 // result: (UGE cc yes no) 40429 for { 40430 v := b.Control 40431 if v.Op != OpARM64GreaterEqualU { 40432 break 40433 } 40434 cc := v.Args[0] 40435 b.Kind = BlockARM64UGE 40436 b.SetControl(cc) 40437 b.Aux = nil 40438 return true 40439 } 40440 // match: (If cond yes no) 40441 // cond: 40442 // result: (NZ cond yes no) 40443 for { 40444 v := b.Control 40445 _ = v 40446 cond := b.Control 40447 b.Kind = BlockARM64NZ 40448 b.SetControl(cond) 40449 b.Aux = nil 40450 return true 40451 } 40452 case 
BlockARM64LE: 40453 // match: (LE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 40454 // cond: x.Uses == 1 40455 // result: (LE (TSTWconst [c] y) yes no) 40456 for { 40457 v := b.Control 40458 if v.Op != OpARM64CMPWconst { 40459 break 40460 } 40461 if v.AuxInt != 0 { 40462 break 40463 } 40464 x := v.Args[0] 40465 if x.Op != OpARM64ANDconst { 40466 break 40467 } 40468 c := x.AuxInt 40469 y := x.Args[0] 40470 if !(x.Uses == 1) { 40471 break 40472 } 40473 b.Kind = BlockARM64LE 40474 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 40475 v0.AuxInt = c 40476 v0.AddArg(y) 40477 b.SetControl(v0) 40478 b.Aux = nil 40479 return true 40480 } 40481 // match: (LE (CMPconst [0] z:(AND x y)) yes no) 40482 // cond: z.Uses == 1 40483 // result: (LE (TST x y) yes no) 40484 for { 40485 v := b.Control 40486 if v.Op != OpARM64CMPconst { 40487 break 40488 } 40489 if v.AuxInt != 0 { 40490 break 40491 } 40492 z := v.Args[0] 40493 if z.Op != OpARM64AND { 40494 break 40495 } 40496 _ = z.Args[1] 40497 x := z.Args[0] 40498 y := z.Args[1] 40499 if !(z.Uses == 1) { 40500 break 40501 } 40502 b.Kind = BlockARM64LE 40503 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 40504 v0.AddArg(x) 40505 v0.AddArg(y) 40506 b.SetControl(v0) 40507 b.Aux = nil 40508 return true 40509 } 40510 // match: (LE (CMPWconst [0] z:(AND x y)) yes no) 40511 // cond: z.Uses == 1 40512 // result: (LE (TSTW x y) yes no) 40513 for { 40514 v := b.Control 40515 if v.Op != OpARM64CMPWconst { 40516 break 40517 } 40518 if v.AuxInt != 0 { 40519 break 40520 } 40521 z := v.Args[0] 40522 if z.Op != OpARM64AND { 40523 break 40524 } 40525 _ = z.Args[1] 40526 x := z.Args[0] 40527 y := z.Args[1] 40528 if !(z.Uses == 1) { 40529 break 40530 } 40531 b.Kind = BlockARM64LE 40532 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 40533 v0.AddArg(x) 40534 v0.AddArg(y) 40535 b.SetControl(v0) 40536 b.Aux = nil 40537 return true 40538 } 40539 // match: (LE (CMPconst [0] x:(ANDconst [c] y)) yes no) 40540 // cond: x.Uses == 1 40541 
// result: (LE (TSTconst [c] y) yes no) 40542 for { 40543 v := b.Control 40544 if v.Op != OpARM64CMPconst { 40545 break 40546 } 40547 if v.AuxInt != 0 { 40548 break 40549 } 40550 x := v.Args[0] 40551 if x.Op != OpARM64ANDconst { 40552 break 40553 } 40554 c := x.AuxInt 40555 y := x.Args[0] 40556 if !(x.Uses == 1) { 40557 break 40558 } 40559 b.Kind = BlockARM64LE 40560 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 40561 v0.AuxInt = c 40562 v0.AddArg(y) 40563 b.SetControl(v0) 40564 b.Aux = nil 40565 return true 40566 } 40567 // match: (LE (CMPconst [0] x:(ADDconst [c] y)) yes no) 40568 // cond: x.Uses == 1 40569 // result: (LE (CMNconst [c] y) yes no) 40570 for { 40571 v := b.Control 40572 if v.Op != OpARM64CMPconst { 40573 break 40574 } 40575 if v.AuxInt != 0 { 40576 break 40577 } 40578 x := v.Args[0] 40579 if x.Op != OpARM64ADDconst { 40580 break 40581 } 40582 c := x.AuxInt 40583 y := x.Args[0] 40584 if !(x.Uses == 1) { 40585 break 40586 } 40587 b.Kind = BlockARM64LE 40588 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 40589 v0.AuxInt = c 40590 v0.AddArg(y) 40591 b.SetControl(v0) 40592 b.Aux = nil 40593 return true 40594 } 40595 // match: (LE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 40596 // cond: x.Uses == 1 40597 // result: (LE (CMNWconst [c] y) yes no) 40598 for { 40599 v := b.Control 40600 if v.Op != OpARM64CMPWconst { 40601 break 40602 } 40603 if v.AuxInt != 0 { 40604 break 40605 } 40606 x := v.Args[0] 40607 if x.Op != OpARM64ADDconst { 40608 break 40609 } 40610 c := x.AuxInt 40611 y := x.Args[0] 40612 if !(x.Uses == 1) { 40613 break 40614 } 40615 b.Kind = BlockARM64LE 40616 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 40617 v0.AuxInt = c 40618 v0.AddArg(y) 40619 b.SetControl(v0) 40620 b.Aux = nil 40621 return true 40622 } 40623 // match: (LE (CMPconst [0] z:(ADD x y)) yes no) 40624 // cond: z.Uses == 1 40625 // result: (LE (CMN x y) yes no) 40626 for { 40627 v := b.Control 40628 if v.Op != OpARM64CMPconst { 40629 break 
40630 } 40631 if v.AuxInt != 0 { 40632 break 40633 } 40634 z := v.Args[0] 40635 if z.Op != OpARM64ADD { 40636 break 40637 } 40638 _ = z.Args[1] 40639 x := z.Args[0] 40640 y := z.Args[1] 40641 if !(z.Uses == 1) { 40642 break 40643 } 40644 b.Kind = BlockARM64LE 40645 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40646 v0.AddArg(x) 40647 v0.AddArg(y) 40648 b.SetControl(v0) 40649 b.Aux = nil 40650 return true 40651 } 40652 // match: (LE (CMPWconst [0] z:(ADD x y)) yes no) 40653 // cond: z.Uses == 1 40654 // result: (LE (CMNW x y) yes no) 40655 for { 40656 v := b.Control 40657 if v.Op != OpARM64CMPWconst { 40658 break 40659 } 40660 if v.AuxInt != 0 { 40661 break 40662 } 40663 z := v.Args[0] 40664 if z.Op != OpARM64ADD { 40665 break 40666 } 40667 _ = z.Args[1] 40668 x := z.Args[0] 40669 y := z.Args[1] 40670 if !(z.Uses == 1) { 40671 break 40672 } 40673 b.Kind = BlockARM64LE 40674 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40675 v0.AddArg(x) 40676 v0.AddArg(y) 40677 b.SetControl(v0) 40678 b.Aux = nil 40679 return true 40680 } 40681 // match: (LE (CMP x z:(NEG y)) yes no) 40682 // cond: z.Uses == 1 40683 // result: (LE (CMN x y) yes no) 40684 for { 40685 v := b.Control 40686 if v.Op != OpARM64CMP { 40687 break 40688 } 40689 _ = v.Args[1] 40690 x := v.Args[0] 40691 z := v.Args[1] 40692 if z.Op != OpARM64NEG { 40693 break 40694 } 40695 y := z.Args[0] 40696 if !(z.Uses == 1) { 40697 break 40698 } 40699 b.Kind = BlockARM64LE 40700 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40701 v0.AddArg(x) 40702 v0.AddArg(y) 40703 b.SetControl(v0) 40704 b.Aux = nil 40705 return true 40706 } 40707 // match: (LE (CMPW x z:(NEG y)) yes no) 40708 // cond: z.Uses == 1 40709 // result: (LE (CMNW x y) yes no) 40710 for { 40711 v := b.Control 40712 if v.Op != OpARM64CMPW { 40713 break 40714 } 40715 _ = v.Args[1] 40716 x := v.Args[0] 40717 z := v.Args[1] 40718 if z.Op != OpARM64NEG { 40719 break 40720 } 40721 y := z.Args[0] 40722 if !(z.Uses == 1) { 40723 break 40724 } 
40725 b.Kind = BlockARM64LE 40726 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40727 v0.AddArg(x) 40728 v0.AddArg(y) 40729 b.SetControl(v0) 40730 b.Aux = nil 40731 return true 40732 } 40733 // match: (LE (CMPconst [0] z:(MADD a x y)) yes no) 40734 // cond: z.Uses==1 40735 // result: (LE (CMN a (MUL <x.Type> x y)) yes no) 40736 for { 40737 v := b.Control 40738 if v.Op != OpARM64CMPconst { 40739 break 40740 } 40741 if v.AuxInt != 0 { 40742 break 40743 } 40744 z := v.Args[0] 40745 if z.Op != OpARM64MADD { 40746 break 40747 } 40748 _ = z.Args[2] 40749 a := z.Args[0] 40750 x := z.Args[1] 40751 y := z.Args[2] 40752 if !(z.Uses == 1) { 40753 break 40754 } 40755 b.Kind = BlockARM64LE 40756 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 40757 v0.AddArg(a) 40758 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40759 v1.AddArg(x) 40760 v1.AddArg(y) 40761 v0.AddArg(v1) 40762 b.SetControl(v0) 40763 b.Aux = nil 40764 return true 40765 } 40766 // match: (LE (CMPconst [0] z:(MSUB a x y)) yes no) 40767 // cond: z.Uses==1 40768 // result: (LE (CMP a (MUL <x.Type> x y)) yes no) 40769 for { 40770 v := b.Control 40771 if v.Op != OpARM64CMPconst { 40772 break 40773 } 40774 if v.AuxInt != 0 { 40775 break 40776 } 40777 z := v.Args[0] 40778 if z.Op != OpARM64MSUB { 40779 break 40780 } 40781 _ = z.Args[2] 40782 a := z.Args[0] 40783 x := z.Args[1] 40784 y := z.Args[2] 40785 if !(z.Uses == 1) { 40786 break 40787 } 40788 b.Kind = BlockARM64LE 40789 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 40790 v0.AddArg(a) 40791 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 40792 v1.AddArg(x) 40793 v1.AddArg(y) 40794 v0.AddArg(v1) 40795 b.SetControl(v0) 40796 b.Aux = nil 40797 return true 40798 } 40799 // match: (LE (CMPWconst [0] z:(MADDW a x y)) yes no) 40800 // cond: z.Uses==1 40801 // result: (LE (CMNW a (MULW <x.Type> x y)) yes no) 40802 for { 40803 v := b.Control 40804 if v.Op != OpARM64CMPWconst { 40805 break 40806 } 40807 if v.AuxInt != 0 { 40808 break 40809 } 40810 z := 
v.Args[0] 40811 if z.Op != OpARM64MADDW { 40812 break 40813 } 40814 _ = z.Args[2] 40815 a := z.Args[0] 40816 x := z.Args[1] 40817 y := z.Args[2] 40818 if !(z.Uses == 1) { 40819 break 40820 } 40821 b.Kind = BlockARM64LE 40822 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 40823 v0.AddArg(a) 40824 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40825 v1.AddArg(x) 40826 v1.AddArg(y) 40827 v0.AddArg(v1) 40828 b.SetControl(v0) 40829 b.Aux = nil 40830 return true 40831 } 40832 // match: (LE (CMPWconst [0] z:(MSUBW a x y)) yes no) 40833 // cond: z.Uses==1 40834 // result: (LE (CMPW a (MULW <x.Type> x y)) yes no) 40835 for { 40836 v := b.Control 40837 if v.Op != OpARM64CMPWconst { 40838 break 40839 } 40840 if v.AuxInt != 0 { 40841 break 40842 } 40843 z := v.Args[0] 40844 if z.Op != OpARM64MSUBW { 40845 break 40846 } 40847 _ = z.Args[2] 40848 a := z.Args[0] 40849 x := z.Args[1] 40850 y := z.Args[2] 40851 if !(z.Uses == 1) { 40852 break 40853 } 40854 b.Kind = BlockARM64LE 40855 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 40856 v0.AddArg(a) 40857 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 40858 v1.AddArg(x) 40859 v1.AddArg(y) 40860 v0.AddArg(v1) 40861 b.SetControl(v0) 40862 b.Aux = nil 40863 return true 40864 } 40865 // match: (LE (FlagEQ) yes no) 40866 // cond: 40867 // result: (First nil yes no) 40868 for { 40869 v := b.Control 40870 if v.Op != OpARM64FlagEQ { 40871 break 40872 } 40873 b.Kind = BlockFirst 40874 b.SetControl(nil) 40875 b.Aux = nil 40876 return true 40877 } 40878 // match: (LE (FlagLT_ULT) yes no) 40879 // cond: 40880 // result: (First nil yes no) 40881 for { 40882 v := b.Control 40883 if v.Op != OpARM64FlagLT_ULT { 40884 break 40885 } 40886 b.Kind = BlockFirst 40887 b.SetControl(nil) 40888 b.Aux = nil 40889 return true 40890 } 40891 // match: (LE (FlagLT_UGT) yes no) 40892 // cond: 40893 // result: (First nil yes no) 40894 for { 40895 v := b.Control 40896 if v.Op != OpARM64FlagLT_UGT { 40897 break 40898 } 40899 b.Kind = BlockFirst 40900 
b.SetControl(nil) 40901 b.Aux = nil 40902 return true 40903 } 40904 // match: (LE (FlagGT_ULT) yes no) 40905 // cond: 40906 // result: (First nil no yes) 40907 for { 40908 v := b.Control 40909 if v.Op != OpARM64FlagGT_ULT { 40910 break 40911 } 40912 b.Kind = BlockFirst 40913 b.SetControl(nil) 40914 b.Aux = nil 40915 b.swapSuccessors() 40916 return true 40917 } 40918 // match: (LE (FlagGT_UGT) yes no) 40919 // cond: 40920 // result: (First nil no yes) 40921 for { 40922 v := b.Control 40923 if v.Op != OpARM64FlagGT_UGT { 40924 break 40925 } 40926 b.Kind = BlockFirst 40927 b.SetControl(nil) 40928 b.Aux = nil 40929 b.swapSuccessors() 40930 return true 40931 } 40932 // match: (LE (InvertFlags cmp) yes no) 40933 // cond: 40934 // result: (GE cmp yes no) 40935 for { 40936 v := b.Control 40937 if v.Op != OpARM64InvertFlags { 40938 break 40939 } 40940 cmp := v.Args[0] 40941 b.Kind = BlockARM64GE 40942 b.SetControl(cmp) 40943 b.Aux = nil 40944 return true 40945 } 40946 case BlockARM64LT: 40947 // match: (LT (CMPWconst [0] x:(ANDconst [c] y)) yes no) 40948 // cond: x.Uses == 1 40949 // result: (LT (TSTWconst [c] y) yes no) 40950 for { 40951 v := b.Control 40952 if v.Op != OpARM64CMPWconst { 40953 break 40954 } 40955 if v.AuxInt != 0 { 40956 break 40957 } 40958 x := v.Args[0] 40959 if x.Op != OpARM64ANDconst { 40960 break 40961 } 40962 c := x.AuxInt 40963 y := x.Args[0] 40964 if !(x.Uses == 1) { 40965 break 40966 } 40967 b.Kind = BlockARM64LT 40968 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 40969 v0.AuxInt = c 40970 v0.AddArg(y) 40971 b.SetControl(v0) 40972 b.Aux = nil 40973 return true 40974 } 40975 // match: (LT (CMPconst [0] z:(AND x y)) yes no) 40976 // cond: z.Uses == 1 40977 // result: (LT (TST x y) yes no) 40978 for { 40979 v := b.Control 40980 if v.Op != OpARM64CMPconst { 40981 break 40982 } 40983 if v.AuxInt != 0 { 40984 break 40985 } 40986 z := v.Args[0] 40987 if z.Op != OpARM64AND { 40988 break 40989 } 40990 _ = z.Args[1] 40991 x := z.Args[0] 40992 
y := z.Args[1] 40993 if !(z.Uses == 1) { 40994 break 40995 } 40996 b.Kind = BlockARM64LT 40997 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 40998 v0.AddArg(x) 40999 v0.AddArg(y) 41000 b.SetControl(v0) 41001 b.Aux = nil 41002 return true 41003 } 41004 // match: (LT (CMPWconst [0] z:(AND x y)) yes no) 41005 // cond: z.Uses == 1 41006 // result: (LT (TSTW x y) yes no) 41007 for { 41008 v := b.Control 41009 if v.Op != OpARM64CMPWconst { 41010 break 41011 } 41012 if v.AuxInt != 0 { 41013 break 41014 } 41015 z := v.Args[0] 41016 if z.Op != OpARM64AND { 41017 break 41018 } 41019 _ = z.Args[1] 41020 x := z.Args[0] 41021 y := z.Args[1] 41022 if !(z.Uses == 1) { 41023 break 41024 } 41025 b.Kind = BlockARM64LT 41026 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 41027 v0.AddArg(x) 41028 v0.AddArg(y) 41029 b.SetControl(v0) 41030 b.Aux = nil 41031 return true 41032 } 41033 // match: (LT (CMPconst [0] x:(ANDconst [c] y)) yes no) 41034 // cond: x.Uses == 1 41035 // result: (LT (TSTconst [c] y) yes no) 41036 for { 41037 v := b.Control 41038 if v.Op != OpARM64CMPconst { 41039 break 41040 } 41041 if v.AuxInt != 0 { 41042 break 41043 } 41044 x := v.Args[0] 41045 if x.Op != OpARM64ANDconst { 41046 break 41047 } 41048 c := x.AuxInt 41049 y := x.Args[0] 41050 if !(x.Uses == 1) { 41051 break 41052 } 41053 b.Kind = BlockARM64LT 41054 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 41055 v0.AuxInt = c 41056 v0.AddArg(y) 41057 b.SetControl(v0) 41058 b.Aux = nil 41059 return true 41060 } 41061 // match: (LT (CMPconst [0] x:(ADDconst [c] y)) yes no) 41062 // cond: x.Uses == 1 41063 // result: (LT (CMNconst [c] y) yes no) 41064 for { 41065 v := b.Control 41066 if v.Op != OpARM64CMPconst { 41067 break 41068 } 41069 if v.AuxInt != 0 { 41070 break 41071 } 41072 x := v.Args[0] 41073 if x.Op != OpARM64ADDconst { 41074 break 41075 } 41076 c := x.AuxInt 41077 y := x.Args[0] 41078 if !(x.Uses == 1) { 41079 break 41080 } 41081 b.Kind = BlockARM64LT 41082 v0 := 
b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 41083 v0.AuxInt = c 41084 v0.AddArg(y) 41085 b.SetControl(v0) 41086 b.Aux = nil 41087 return true 41088 } 41089 // match: (LT (CMPWconst [0] x:(ADDconst [c] y)) yes no) 41090 // cond: x.Uses == 1 41091 // result: (LT (CMNWconst [c] y) yes no) 41092 for { 41093 v := b.Control 41094 if v.Op != OpARM64CMPWconst { 41095 break 41096 } 41097 if v.AuxInt != 0 { 41098 break 41099 } 41100 x := v.Args[0] 41101 if x.Op != OpARM64ADDconst { 41102 break 41103 } 41104 c := x.AuxInt 41105 y := x.Args[0] 41106 if !(x.Uses == 1) { 41107 break 41108 } 41109 b.Kind = BlockARM64LT 41110 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 41111 v0.AuxInt = c 41112 v0.AddArg(y) 41113 b.SetControl(v0) 41114 b.Aux = nil 41115 return true 41116 } 41117 // match: (LT (CMPconst [0] z:(ADD x y)) yes no) 41118 // cond: z.Uses == 1 41119 // result: (LT (CMN x y) yes no) 41120 for { 41121 v := b.Control 41122 if v.Op != OpARM64CMPconst { 41123 break 41124 } 41125 if v.AuxInt != 0 { 41126 break 41127 } 41128 z := v.Args[0] 41129 if z.Op != OpARM64ADD { 41130 break 41131 } 41132 _ = z.Args[1] 41133 x := z.Args[0] 41134 y := z.Args[1] 41135 if !(z.Uses == 1) { 41136 break 41137 } 41138 b.Kind = BlockARM64LT 41139 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41140 v0.AddArg(x) 41141 v0.AddArg(y) 41142 b.SetControl(v0) 41143 b.Aux = nil 41144 return true 41145 } 41146 // match: (LT (CMPWconst [0] z:(ADD x y)) yes no) 41147 // cond: z.Uses == 1 41148 // result: (LT (CMNW x y) yes no) 41149 for { 41150 v := b.Control 41151 if v.Op != OpARM64CMPWconst { 41152 break 41153 } 41154 if v.AuxInt != 0 { 41155 break 41156 } 41157 z := v.Args[0] 41158 if z.Op != OpARM64ADD { 41159 break 41160 } 41161 _ = z.Args[1] 41162 x := z.Args[0] 41163 y := z.Args[1] 41164 if !(z.Uses == 1) { 41165 break 41166 } 41167 b.Kind = BlockARM64LT 41168 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41169 v0.AddArg(x) 41170 v0.AddArg(y) 41171 
b.SetControl(v0) 41172 b.Aux = nil 41173 return true 41174 } 41175 // match: (LT (CMP x z:(NEG y)) yes no) 41176 // cond: z.Uses == 1 41177 // result: (LT (CMN x y) yes no) 41178 for { 41179 v := b.Control 41180 if v.Op != OpARM64CMP { 41181 break 41182 } 41183 _ = v.Args[1] 41184 x := v.Args[0] 41185 z := v.Args[1] 41186 if z.Op != OpARM64NEG { 41187 break 41188 } 41189 y := z.Args[0] 41190 if !(z.Uses == 1) { 41191 break 41192 } 41193 b.Kind = BlockARM64LT 41194 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41195 v0.AddArg(x) 41196 v0.AddArg(y) 41197 b.SetControl(v0) 41198 b.Aux = nil 41199 return true 41200 } 41201 // match: (LT (CMPW x z:(NEG y)) yes no) 41202 // cond: z.Uses == 1 41203 // result: (LT (CMNW x y) yes no) 41204 for { 41205 v := b.Control 41206 if v.Op != OpARM64CMPW { 41207 break 41208 } 41209 _ = v.Args[1] 41210 x := v.Args[0] 41211 z := v.Args[1] 41212 if z.Op != OpARM64NEG { 41213 break 41214 } 41215 y := z.Args[0] 41216 if !(z.Uses == 1) { 41217 break 41218 } 41219 b.Kind = BlockARM64LT 41220 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41221 v0.AddArg(x) 41222 v0.AddArg(y) 41223 b.SetControl(v0) 41224 b.Aux = nil 41225 return true 41226 } 41227 // match: (LT (CMPconst [0] z:(MADD a x y)) yes no) 41228 // cond: z.Uses==1 41229 // result: (LT (CMN a (MUL <x.Type> x y)) yes no) 41230 for { 41231 v := b.Control 41232 if v.Op != OpARM64CMPconst { 41233 break 41234 } 41235 if v.AuxInt != 0 { 41236 break 41237 } 41238 z := v.Args[0] 41239 if z.Op != OpARM64MADD { 41240 break 41241 } 41242 _ = z.Args[2] 41243 a := z.Args[0] 41244 x := z.Args[1] 41245 y := z.Args[2] 41246 if !(z.Uses == 1) { 41247 break 41248 } 41249 b.Kind = BlockARM64LT 41250 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41251 v0.AddArg(a) 41252 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41253 v1.AddArg(x) 41254 v1.AddArg(y) 41255 v0.AddArg(v1) 41256 b.SetControl(v0) 41257 b.Aux = nil 41258 return true 41259 } 41260 // match: (LT (CMPconst [0] z:(MSUB a 
x y)) yes no) 41261 // cond: z.Uses==1 41262 // result: (LT (CMP a (MUL <x.Type> x y)) yes no) 41263 for { 41264 v := b.Control 41265 if v.Op != OpARM64CMPconst { 41266 break 41267 } 41268 if v.AuxInt != 0 { 41269 break 41270 } 41271 z := v.Args[0] 41272 if z.Op != OpARM64MSUB { 41273 break 41274 } 41275 _ = z.Args[2] 41276 a := z.Args[0] 41277 x := z.Args[1] 41278 y := z.Args[2] 41279 if !(z.Uses == 1) { 41280 break 41281 } 41282 b.Kind = BlockARM64LT 41283 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 41284 v0.AddArg(a) 41285 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41286 v1.AddArg(x) 41287 v1.AddArg(y) 41288 v0.AddArg(v1) 41289 b.SetControl(v0) 41290 b.Aux = nil 41291 return true 41292 } 41293 // match: (LT (CMPWconst [0] z:(MADDW a x y)) yes no) 41294 // cond: z.Uses==1 41295 // result: (LT (CMNW a (MULW <x.Type> x y)) yes no) 41296 for { 41297 v := b.Control 41298 if v.Op != OpARM64CMPWconst { 41299 break 41300 } 41301 if v.AuxInt != 0 { 41302 break 41303 } 41304 z := v.Args[0] 41305 if z.Op != OpARM64MADDW { 41306 break 41307 } 41308 _ = z.Args[2] 41309 a := z.Args[0] 41310 x := z.Args[1] 41311 y := z.Args[2] 41312 if !(z.Uses == 1) { 41313 break 41314 } 41315 b.Kind = BlockARM64LT 41316 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41317 v0.AddArg(a) 41318 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41319 v1.AddArg(x) 41320 v1.AddArg(y) 41321 v0.AddArg(v1) 41322 b.SetControl(v0) 41323 b.Aux = nil 41324 return true 41325 } 41326 // match: (LT (CMPWconst [0] z:(MSUBW a x y)) yes no) 41327 // cond: z.Uses==1 41328 // result: (LT (CMPW a (MULW <x.Type> x y)) yes no) 41329 for { 41330 v := b.Control 41331 if v.Op != OpARM64CMPWconst { 41332 break 41333 } 41334 if v.AuxInt != 0 { 41335 break 41336 } 41337 z := v.Args[0] 41338 if z.Op != OpARM64MSUBW { 41339 break 41340 } 41341 _ = z.Args[2] 41342 a := z.Args[0] 41343 x := z.Args[1] 41344 y := z.Args[2] 41345 if !(z.Uses == 1) { 41346 break 41347 } 41348 b.Kind = BlockARM64LT 41349 v0 := 
b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 41350 v0.AddArg(a) 41351 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41352 v1.AddArg(x) 41353 v1.AddArg(y) 41354 v0.AddArg(v1) 41355 b.SetControl(v0) 41356 b.Aux = nil 41357 return true 41358 } 41359 // match: (LT (CMPWconst [0] x) yes no) 41360 // cond: 41361 // result: (TBNZ {int64(31)} x yes no) 41362 for { 41363 v := b.Control 41364 if v.Op != OpARM64CMPWconst { 41365 break 41366 } 41367 if v.AuxInt != 0 { 41368 break 41369 } 41370 x := v.Args[0] 41371 b.Kind = BlockARM64TBNZ 41372 b.SetControl(x) 41373 b.Aux = int64(31) 41374 return true 41375 } 41376 // match: (LT (CMPconst [0] x) yes no) 41377 // cond: 41378 // result: (TBNZ {int64(63)} x yes no) 41379 for { 41380 v := b.Control 41381 if v.Op != OpARM64CMPconst { 41382 break 41383 } 41384 if v.AuxInt != 0 { 41385 break 41386 } 41387 x := v.Args[0] 41388 b.Kind = BlockARM64TBNZ 41389 b.SetControl(x) 41390 b.Aux = int64(63) 41391 return true 41392 } 41393 // match: (LT (FlagEQ) yes no) 41394 // cond: 41395 // result: (First nil no yes) 41396 for { 41397 v := b.Control 41398 if v.Op != OpARM64FlagEQ { 41399 break 41400 } 41401 b.Kind = BlockFirst 41402 b.SetControl(nil) 41403 b.Aux = nil 41404 b.swapSuccessors() 41405 return true 41406 } 41407 // match: (LT (FlagLT_ULT) yes no) 41408 // cond: 41409 // result: (First nil yes no) 41410 for { 41411 v := b.Control 41412 if v.Op != OpARM64FlagLT_ULT { 41413 break 41414 } 41415 b.Kind = BlockFirst 41416 b.SetControl(nil) 41417 b.Aux = nil 41418 return true 41419 } 41420 // match: (LT (FlagLT_UGT) yes no) 41421 // cond: 41422 // result: (First nil yes no) 41423 for { 41424 v := b.Control 41425 if v.Op != OpARM64FlagLT_UGT { 41426 break 41427 } 41428 b.Kind = BlockFirst 41429 b.SetControl(nil) 41430 b.Aux = nil 41431 return true 41432 } 41433 // match: (LT (FlagGT_ULT) yes no) 41434 // cond: 41435 // result: (First nil no yes) 41436 for { 41437 v := b.Control 41438 if v.Op != OpARM64FlagGT_ULT { 41439 break 41440 } 
41441 b.Kind = BlockFirst 41442 b.SetControl(nil) 41443 b.Aux = nil 41444 b.swapSuccessors() 41445 return true 41446 } 41447 // match: (LT (FlagGT_UGT) yes no) 41448 // cond: 41449 // result: (First nil no yes) 41450 for { 41451 v := b.Control 41452 if v.Op != OpARM64FlagGT_UGT { 41453 break 41454 } 41455 b.Kind = BlockFirst 41456 b.SetControl(nil) 41457 b.Aux = nil 41458 b.swapSuccessors() 41459 return true 41460 } 41461 // match: (LT (InvertFlags cmp) yes no) 41462 // cond: 41463 // result: (GT cmp yes no) 41464 for { 41465 v := b.Control 41466 if v.Op != OpARM64InvertFlags { 41467 break 41468 } 41469 cmp := v.Args[0] 41470 b.Kind = BlockARM64GT 41471 b.SetControl(cmp) 41472 b.Aux = nil 41473 return true 41474 } 41475 case BlockARM64NE: 41476 // match: (NE (CMPWconst [0] x:(ANDconst [c] y)) yes no) 41477 // cond: x.Uses == 1 41478 // result: (NE (TSTWconst [c] y) yes no) 41479 for { 41480 v := b.Control 41481 if v.Op != OpARM64CMPWconst { 41482 break 41483 } 41484 if v.AuxInt != 0 { 41485 break 41486 } 41487 x := v.Args[0] 41488 if x.Op != OpARM64ANDconst { 41489 break 41490 } 41491 c := x.AuxInt 41492 y := x.Args[0] 41493 if !(x.Uses == 1) { 41494 break 41495 } 41496 b.Kind = BlockARM64NE 41497 v0 := b.NewValue0(v.Pos, OpARM64TSTWconst, types.TypeFlags) 41498 v0.AuxInt = c 41499 v0.AddArg(y) 41500 b.SetControl(v0) 41501 b.Aux = nil 41502 return true 41503 } 41504 // match: (NE (CMPconst [0] z:(AND x y)) yes no) 41505 // cond: z.Uses == 1 41506 // result: (NE (TST x y) yes no) 41507 for { 41508 v := b.Control 41509 if v.Op != OpARM64CMPconst { 41510 break 41511 } 41512 if v.AuxInt != 0 { 41513 break 41514 } 41515 z := v.Args[0] 41516 if z.Op != OpARM64AND { 41517 break 41518 } 41519 _ = z.Args[1] 41520 x := z.Args[0] 41521 y := z.Args[1] 41522 if !(z.Uses == 1) { 41523 break 41524 } 41525 b.Kind = BlockARM64NE 41526 v0 := b.NewValue0(v.Pos, OpARM64TST, types.TypeFlags) 41527 v0.AddArg(x) 41528 v0.AddArg(y) 41529 b.SetControl(v0) 41530 b.Aux = nil 41531 return 
true 41532 } 41533 // match: (NE (CMPWconst [0] z:(AND x y)) yes no) 41534 // cond: z.Uses == 1 41535 // result: (NE (TSTW x y) yes no) 41536 for { 41537 v := b.Control 41538 if v.Op != OpARM64CMPWconst { 41539 break 41540 } 41541 if v.AuxInt != 0 { 41542 break 41543 } 41544 z := v.Args[0] 41545 if z.Op != OpARM64AND { 41546 break 41547 } 41548 _ = z.Args[1] 41549 x := z.Args[0] 41550 y := z.Args[1] 41551 if !(z.Uses == 1) { 41552 break 41553 } 41554 b.Kind = BlockARM64NE 41555 v0 := b.NewValue0(v.Pos, OpARM64TSTW, types.TypeFlags) 41556 v0.AddArg(x) 41557 v0.AddArg(y) 41558 b.SetControl(v0) 41559 b.Aux = nil 41560 return true 41561 } 41562 // match: (NE (CMPconst [0] x:(ANDconst [c] y)) yes no) 41563 // cond: x.Uses == 1 41564 // result: (NE (TSTconst [c] y) yes no) 41565 for { 41566 v := b.Control 41567 if v.Op != OpARM64CMPconst { 41568 break 41569 } 41570 if v.AuxInt != 0 { 41571 break 41572 } 41573 x := v.Args[0] 41574 if x.Op != OpARM64ANDconst { 41575 break 41576 } 41577 c := x.AuxInt 41578 y := x.Args[0] 41579 if !(x.Uses == 1) { 41580 break 41581 } 41582 b.Kind = BlockARM64NE 41583 v0 := b.NewValue0(v.Pos, OpARM64TSTconst, types.TypeFlags) 41584 v0.AuxInt = c 41585 v0.AddArg(y) 41586 b.SetControl(v0) 41587 b.Aux = nil 41588 return true 41589 } 41590 // match: (NE (CMPconst [0] x:(ADDconst [c] y)) yes no) 41591 // cond: x.Uses == 1 41592 // result: (NE (CMNconst [c] y) yes no) 41593 for { 41594 v := b.Control 41595 if v.Op != OpARM64CMPconst { 41596 break 41597 } 41598 if v.AuxInt != 0 { 41599 break 41600 } 41601 x := v.Args[0] 41602 if x.Op != OpARM64ADDconst { 41603 break 41604 } 41605 c := x.AuxInt 41606 y := x.Args[0] 41607 if !(x.Uses == 1) { 41608 break 41609 } 41610 b.Kind = BlockARM64NE 41611 v0 := b.NewValue0(v.Pos, OpARM64CMNconst, types.TypeFlags) 41612 v0.AuxInt = c 41613 v0.AddArg(y) 41614 b.SetControl(v0) 41615 b.Aux = nil 41616 return true 41617 } 41618 // match: (NE (CMPWconst [0] x:(ADDconst [c] y)) yes no) 41619 // cond: x.Uses == 1 41620 
// result: (NE (CMNWconst [c] y) yes no) 41621 for { 41622 v := b.Control 41623 if v.Op != OpARM64CMPWconst { 41624 break 41625 } 41626 if v.AuxInt != 0 { 41627 break 41628 } 41629 x := v.Args[0] 41630 if x.Op != OpARM64ADDconst { 41631 break 41632 } 41633 c := x.AuxInt 41634 y := x.Args[0] 41635 if !(x.Uses == 1) { 41636 break 41637 } 41638 b.Kind = BlockARM64NE 41639 v0 := b.NewValue0(v.Pos, OpARM64CMNWconst, types.TypeFlags) 41640 v0.AuxInt = c 41641 v0.AddArg(y) 41642 b.SetControl(v0) 41643 b.Aux = nil 41644 return true 41645 } 41646 // match: (NE (CMPconst [0] z:(ADD x y)) yes no) 41647 // cond: z.Uses == 1 41648 // result: (NE (CMN x y) yes no) 41649 for { 41650 v := b.Control 41651 if v.Op != OpARM64CMPconst { 41652 break 41653 } 41654 if v.AuxInt != 0 { 41655 break 41656 } 41657 z := v.Args[0] 41658 if z.Op != OpARM64ADD { 41659 break 41660 } 41661 _ = z.Args[1] 41662 x := z.Args[0] 41663 y := z.Args[1] 41664 if !(z.Uses == 1) { 41665 break 41666 } 41667 b.Kind = BlockARM64NE 41668 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41669 v0.AddArg(x) 41670 v0.AddArg(y) 41671 b.SetControl(v0) 41672 b.Aux = nil 41673 return true 41674 } 41675 // match: (NE (CMPWconst [0] z:(ADD x y)) yes no) 41676 // cond: z.Uses == 1 41677 // result: (NE (CMNW x y) yes no) 41678 for { 41679 v := b.Control 41680 if v.Op != OpARM64CMPWconst { 41681 break 41682 } 41683 if v.AuxInt != 0 { 41684 break 41685 } 41686 z := v.Args[0] 41687 if z.Op != OpARM64ADD { 41688 break 41689 } 41690 _ = z.Args[1] 41691 x := z.Args[0] 41692 y := z.Args[1] 41693 if !(z.Uses == 1) { 41694 break 41695 } 41696 b.Kind = BlockARM64NE 41697 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41698 v0.AddArg(x) 41699 v0.AddArg(y) 41700 b.SetControl(v0) 41701 b.Aux = nil 41702 return true 41703 } 41704 // match: (NE (CMP x z:(NEG y)) yes no) 41705 // cond: z.Uses == 1 41706 // result: (NE (CMN x y) yes no) 41707 for { 41708 v := b.Control 41709 if v.Op != OpARM64CMP { 41710 break 41711 } 41712 _ = 
v.Args[1] 41713 x := v.Args[0] 41714 z := v.Args[1] 41715 if z.Op != OpARM64NEG { 41716 break 41717 } 41718 y := z.Args[0] 41719 if !(z.Uses == 1) { 41720 break 41721 } 41722 b.Kind = BlockARM64NE 41723 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41724 v0.AddArg(x) 41725 v0.AddArg(y) 41726 b.SetControl(v0) 41727 b.Aux = nil 41728 return true 41729 } 41730 // match: (NE (CMPW x z:(NEG y)) yes no) 41731 // cond: z.Uses == 1 41732 // result: (NE (CMNW x y) yes no) 41733 for { 41734 v := b.Control 41735 if v.Op != OpARM64CMPW { 41736 break 41737 } 41738 _ = v.Args[1] 41739 x := v.Args[0] 41740 z := v.Args[1] 41741 if z.Op != OpARM64NEG { 41742 break 41743 } 41744 y := z.Args[0] 41745 if !(z.Uses == 1) { 41746 break 41747 } 41748 b.Kind = BlockARM64NE 41749 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41750 v0.AddArg(x) 41751 v0.AddArg(y) 41752 b.SetControl(v0) 41753 b.Aux = nil 41754 return true 41755 } 41756 // match: (NE (CMPconst [0] x) yes no) 41757 // cond: 41758 // result: (NZ x yes no) 41759 for { 41760 v := b.Control 41761 if v.Op != OpARM64CMPconst { 41762 break 41763 } 41764 if v.AuxInt != 0 { 41765 break 41766 } 41767 x := v.Args[0] 41768 b.Kind = BlockARM64NZ 41769 b.SetControl(x) 41770 b.Aux = nil 41771 return true 41772 } 41773 // match: (NE (CMPWconst [0] x) yes no) 41774 // cond: 41775 // result: (NZW x yes no) 41776 for { 41777 v := b.Control 41778 if v.Op != OpARM64CMPWconst { 41779 break 41780 } 41781 if v.AuxInt != 0 { 41782 break 41783 } 41784 x := v.Args[0] 41785 b.Kind = BlockARM64NZW 41786 b.SetControl(x) 41787 b.Aux = nil 41788 return true 41789 } 41790 // match: (NE (CMPconst [0] z:(MADD a x y)) yes no) 41791 // cond: z.Uses==1 41792 // result: (NE (CMN a (MUL <x.Type> x y)) yes no) 41793 for { 41794 v := b.Control 41795 if v.Op != OpARM64CMPconst { 41796 break 41797 } 41798 if v.AuxInt != 0 { 41799 break 41800 } 41801 z := v.Args[0] 41802 if z.Op != OpARM64MADD { 41803 break 41804 } 41805 _ = z.Args[2] 41806 a := z.Args[0] 
41807 x := z.Args[1] 41808 y := z.Args[2] 41809 if !(z.Uses == 1) { 41810 break 41811 } 41812 b.Kind = BlockARM64NE 41813 v0 := b.NewValue0(v.Pos, OpARM64CMN, types.TypeFlags) 41814 v0.AddArg(a) 41815 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41816 v1.AddArg(x) 41817 v1.AddArg(y) 41818 v0.AddArg(v1) 41819 b.SetControl(v0) 41820 b.Aux = nil 41821 return true 41822 } 41823 // match: (NE (CMPconst [0] z:(MSUB a x y)) yes no) 41824 // cond: z.Uses==1 41825 // result: (NE (CMP a (MUL <x.Type> x y)) yes no) 41826 for { 41827 v := b.Control 41828 if v.Op != OpARM64CMPconst { 41829 break 41830 } 41831 if v.AuxInt != 0 { 41832 break 41833 } 41834 z := v.Args[0] 41835 if z.Op != OpARM64MSUB { 41836 break 41837 } 41838 _ = z.Args[2] 41839 a := z.Args[0] 41840 x := z.Args[1] 41841 y := z.Args[2] 41842 if !(z.Uses == 1) { 41843 break 41844 } 41845 b.Kind = BlockARM64NE 41846 v0 := b.NewValue0(v.Pos, OpARM64CMP, types.TypeFlags) 41847 v0.AddArg(a) 41848 v1 := b.NewValue0(v.Pos, OpARM64MUL, x.Type) 41849 v1.AddArg(x) 41850 v1.AddArg(y) 41851 v0.AddArg(v1) 41852 b.SetControl(v0) 41853 b.Aux = nil 41854 return true 41855 } 41856 // match: (NE (CMPWconst [0] z:(MADDW a x y)) yes no) 41857 // cond: z.Uses==1 41858 // result: (NE (CMNW a (MULW <x.Type> x y)) yes no) 41859 for { 41860 v := b.Control 41861 if v.Op != OpARM64CMPWconst { 41862 break 41863 } 41864 if v.AuxInt != 0 { 41865 break 41866 } 41867 z := v.Args[0] 41868 if z.Op != OpARM64MADDW { 41869 break 41870 } 41871 _ = z.Args[2] 41872 a := z.Args[0] 41873 x := z.Args[1] 41874 y := z.Args[2] 41875 if !(z.Uses == 1) { 41876 break 41877 } 41878 b.Kind = BlockARM64NE 41879 v0 := b.NewValue0(v.Pos, OpARM64CMNW, types.TypeFlags) 41880 v0.AddArg(a) 41881 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41882 v1.AddArg(x) 41883 v1.AddArg(y) 41884 v0.AddArg(v1) 41885 b.SetControl(v0) 41886 b.Aux = nil 41887 return true 41888 } 41889 // match: (NE (CMPWconst [0] z:(MSUBW a x y)) yes no) 41890 // cond: z.Uses==1 41891 // result: (NE 
(CMPW a (MULW <x.Type> x y)) yes no) 41892 for { 41893 v := b.Control 41894 if v.Op != OpARM64CMPWconst { 41895 break 41896 } 41897 if v.AuxInt != 0 { 41898 break 41899 } 41900 z := v.Args[0] 41901 if z.Op != OpARM64MSUBW { 41902 break 41903 } 41904 _ = z.Args[2] 41905 a := z.Args[0] 41906 x := z.Args[1] 41907 y := z.Args[2] 41908 if !(z.Uses == 1) { 41909 break 41910 } 41911 b.Kind = BlockARM64NE 41912 v0 := b.NewValue0(v.Pos, OpARM64CMPW, types.TypeFlags) 41913 v0.AddArg(a) 41914 v1 := b.NewValue0(v.Pos, OpARM64MULW, x.Type) 41915 v1.AddArg(x) 41916 v1.AddArg(y) 41917 v0.AddArg(v1) 41918 b.SetControl(v0) 41919 b.Aux = nil 41920 return true 41921 } 41922 // match: (NE (TSTconst [c] x) yes no) 41923 // cond: oneBit(c) 41924 // result: (TBNZ {ntz(c)} x yes no) 41925 for { 41926 v := b.Control 41927 if v.Op != OpARM64TSTconst { 41928 break 41929 } 41930 c := v.AuxInt 41931 x := v.Args[0] 41932 if !(oneBit(c)) { 41933 break 41934 } 41935 b.Kind = BlockARM64TBNZ 41936 b.SetControl(x) 41937 b.Aux = ntz(c) 41938 return true 41939 } 41940 // match: (NE (TSTWconst [c] x) yes no) 41941 // cond: oneBit(int64(uint32(c))) 41942 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 41943 for { 41944 v := b.Control 41945 if v.Op != OpARM64TSTWconst { 41946 break 41947 } 41948 c := v.AuxInt 41949 x := v.Args[0] 41950 if !(oneBit(int64(uint32(c)))) { 41951 break 41952 } 41953 b.Kind = BlockARM64TBNZ 41954 b.SetControl(x) 41955 b.Aux = ntz(int64(uint32(c))) 41956 return true 41957 } 41958 // match: (NE (FlagEQ) yes no) 41959 // cond: 41960 // result: (First nil no yes) 41961 for { 41962 v := b.Control 41963 if v.Op != OpARM64FlagEQ { 41964 break 41965 } 41966 b.Kind = BlockFirst 41967 b.SetControl(nil) 41968 b.Aux = nil 41969 b.swapSuccessors() 41970 return true 41971 } 41972 // match: (NE (FlagLT_ULT) yes no) 41973 // cond: 41974 // result: (First nil yes no) 41975 for { 41976 v := b.Control 41977 if v.Op != OpARM64FlagLT_ULT { 41978 break 41979 } 41980 b.Kind = BlockFirst 41981 
b.SetControl(nil) 41982 b.Aux = nil 41983 return true 41984 } 41985 // match: (NE (FlagLT_UGT) yes no) 41986 // cond: 41987 // result: (First nil yes no) 41988 for { 41989 v := b.Control 41990 if v.Op != OpARM64FlagLT_UGT { 41991 break 41992 } 41993 b.Kind = BlockFirst 41994 b.SetControl(nil) 41995 b.Aux = nil 41996 return true 41997 } 41998 // match: (NE (FlagGT_ULT) yes no) 41999 // cond: 42000 // result: (First nil yes no) 42001 for { 42002 v := b.Control 42003 if v.Op != OpARM64FlagGT_ULT { 42004 break 42005 } 42006 b.Kind = BlockFirst 42007 b.SetControl(nil) 42008 b.Aux = nil 42009 return true 42010 } 42011 // match: (NE (FlagGT_UGT) yes no) 42012 // cond: 42013 // result: (First nil yes no) 42014 for { 42015 v := b.Control 42016 if v.Op != OpARM64FlagGT_UGT { 42017 break 42018 } 42019 b.Kind = BlockFirst 42020 b.SetControl(nil) 42021 b.Aux = nil 42022 return true 42023 } 42024 // match: (NE (InvertFlags cmp) yes no) 42025 // cond: 42026 // result: (NE cmp yes no) 42027 for { 42028 v := b.Control 42029 if v.Op != OpARM64InvertFlags { 42030 break 42031 } 42032 cmp := v.Args[0] 42033 b.Kind = BlockARM64NE 42034 b.SetControl(cmp) 42035 b.Aux = nil 42036 return true 42037 } 42038 case BlockARM64NZ: 42039 // match: (NZ (Equal cc) yes no) 42040 // cond: 42041 // result: (EQ cc yes no) 42042 for { 42043 v := b.Control 42044 if v.Op != OpARM64Equal { 42045 break 42046 } 42047 cc := v.Args[0] 42048 b.Kind = BlockARM64EQ 42049 b.SetControl(cc) 42050 b.Aux = nil 42051 return true 42052 } 42053 // match: (NZ (NotEqual cc) yes no) 42054 // cond: 42055 // result: (NE cc yes no) 42056 for { 42057 v := b.Control 42058 if v.Op != OpARM64NotEqual { 42059 break 42060 } 42061 cc := v.Args[0] 42062 b.Kind = BlockARM64NE 42063 b.SetControl(cc) 42064 b.Aux = nil 42065 return true 42066 } 42067 // match: (NZ (LessThan cc) yes no) 42068 // cond: 42069 // result: (LT cc yes no) 42070 for { 42071 v := b.Control 42072 if v.Op != OpARM64LessThan { 42073 break 42074 } 42075 cc := v.Args[0] 
42076 b.Kind = BlockARM64LT 42077 b.SetControl(cc) 42078 b.Aux = nil 42079 return true 42080 } 42081 // match: (NZ (LessThanU cc) yes no) 42082 // cond: 42083 // result: (ULT cc yes no) 42084 for { 42085 v := b.Control 42086 if v.Op != OpARM64LessThanU { 42087 break 42088 } 42089 cc := v.Args[0] 42090 b.Kind = BlockARM64ULT 42091 b.SetControl(cc) 42092 b.Aux = nil 42093 return true 42094 } 42095 // match: (NZ (LessEqual cc) yes no) 42096 // cond: 42097 // result: (LE cc yes no) 42098 for { 42099 v := b.Control 42100 if v.Op != OpARM64LessEqual { 42101 break 42102 } 42103 cc := v.Args[0] 42104 b.Kind = BlockARM64LE 42105 b.SetControl(cc) 42106 b.Aux = nil 42107 return true 42108 } 42109 // match: (NZ (LessEqualU cc) yes no) 42110 // cond: 42111 // result: (ULE cc yes no) 42112 for { 42113 v := b.Control 42114 if v.Op != OpARM64LessEqualU { 42115 break 42116 } 42117 cc := v.Args[0] 42118 b.Kind = BlockARM64ULE 42119 b.SetControl(cc) 42120 b.Aux = nil 42121 return true 42122 } 42123 // match: (NZ (GreaterThan cc) yes no) 42124 // cond: 42125 // result: (GT cc yes no) 42126 for { 42127 v := b.Control 42128 if v.Op != OpARM64GreaterThan { 42129 break 42130 } 42131 cc := v.Args[0] 42132 b.Kind = BlockARM64GT 42133 b.SetControl(cc) 42134 b.Aux = nil 42135 return true 42136 } 42137 // match: (NZ (GreaterThanU cc) yes no) 42138 // cond: 42139 // result: (UGT cc yes no) 42140 for { 42141 v := b.Control 42142 if v.Op != OpARM64GreaterThanU { 42143 break 42144 } 42145 cc := v.Args[0] 42146 b.Kind = BlockARM64UGT 42147 b.SetControl(cc) 42148 b.Aux = nil 42149 return true 42150 } 42151 // match: (NZ (GreaterEqual cc) yes no) 42152 // cond: 42153 // result: (GE cc yes no) 42154 for { 42155 v := b.Control 42156 if v.Op != OpARM64GreaterEqual { 42157 break 42158 } 42159 cc := v.Args[0] 42160 b.Kind = BlockARM64GE 42161 b.SetControl(cc) 42162 b.Aux = nil 42163 return true 42164 } 42165 // match: (NZ (GreaterEqualU cc) yes no) 42166 // cond: 42167 // result: (UGE cc yes no) 42168 for 
{ 42169 v := b.Control 42170 if v.Op != OpARM64GreaterEqualU { 42171 break 42172 } 42173 cc := v.Args[0] 42174 b.Kind = BlockARM64UGE 42175 b.SetControl(cc) 42176 b.Aux = nil 42177 return true 42178 } 42179 // match: (NZ (ANDconst [c] x) yes no) 42180 // cond: oneBit(c) 42181 // result: (TBNZ {ntz(c)} x yes no) 42182 for { 42183 v := b.Control 42184 if v.Op != OpARM64ANDconst { 42185 break 42186 } 42187 c := v.AuxInt 42188 x := v.Args[0] 42189 if !(oneBit(c)) { 42190 break 42191 } 42192 b.Kind = BlockARM64TBNZ 42193 b.SetControl(x) 42194 b.Aux = ntz(c) 42195 return true 42196 } 42197 // match: (NZ (MOVDconst [0]) yes no) 42198 // cond: 42199 // result: (First nil no yes) 42200 for { 42201 v := b.Control 42202 if v.Op != OpARM64MOVDconst { 42203 break 42204 } 42205 if v.AuxInt != 0 { 42206 break 42207 } 42208 b.Kind = BlockFirst 42209 b.SetControl(nil) 42210 b.Aux = nil 42211 b.swapSuccessors() 42212 return true 42213 } 42214 // match: (NZ (MOVDconst [c]) yes no) 42215 // cond: c != 0 42216 // result: (First nil yes no) 42217 for { 42218 v := b.Control 42219 if v.Op != OpARM64MOVDconst { 42220 break 42221 } 42222 c := v.AuxInt 42223 if !(c != 0) { 42224 break 42225 } 42226 b.Kind = BlockFirst 42227 b.SetControl(nil) 42228 b.Aux = nil 42229 return true 42230 } 42231 case BlockARM64NZW: 42232 // match: (NZW (ANDconst [c] x) yes no) 42233 // cond: oneBit(int64(uint32(c))) 42234 // result: (TBNZ {ntz(int64(uint32(c)))} x yes no) 42235 for { 42236 v := b.Control 42237 if v.Op != OpARM64ANDconst { 42238 break 42239 } 42240 c := v.AuxInt 42241 x := v.Args[0] 42242 if !(oneBit(int64(uint32(c)))) { 42243 break 42244 } 42245 b.Kind = BlockARM64TBNZ 42246 b.SetControl(x) 42247 b.Aux = ntz(int64(uint32(c))) 42248 return true 42249 } 42250 // match: (NZW (MOVDconst [c]) yes no) 42251 // cond: int32(c) == 0 42252 // result: (First nil no yes) 42253 for { 42254 v := b.Control 42255 if v.Op != OpARM64MOVDconst { 42256 break 42257 } 42258 c := v.AuxInt 42259 if !(int32(c) == 0) { 
42260 break 42261 } 42262 b.Kind = BlockFirst 42263 b.SetControl(nil) 42264 b.Aux = nil 42265 b.swapSuccessors() 42266 return true 42267 } 42268 // match: (NZW (MOVDconst [c]) yes no) 42269 // cond: int32(c) != 0 42270 // result: (First nil yes no) 42271 for { 42272 v := b.Control 42273 if v.Op != OpARM64MOVDconst { 42274 break 42275 } 42276 c := v.AuxInt 42277 if !(int32(c) != 0) { 42278 break 42279 } 42280 b.Kind = BlockFirst 42281 b.SetControl(nil) 42282 b.Aux = nil 42283 return true 42284 } 42285 case BlockARM64UGE: 42286 // match: (UGE (FlagEQ) yes no) 42287 // cond: 42288 // result: (First nil yes no) 42289 for { 42290 v := b.Control 42291 if v.Op != OpARM64FlagEQ { 42292 break 42293 } 42294 b.Kind = BlockFirst 42295 b.SetControl(nil) 42296 b.Aux = nil 42297 return true 42298 } 42299 // match: (UGE (FlagLT_ULT) yes no) 42300 // cond: 42301 // result: (First nil no yes) 42302 for { 42303 v := b.Control 42304 if v.Op != OpARM64FlagLT_ULT { 42305 break 42306 } 42307 b.Kind = BlockFirst 42308 b.SetControl(nil) 42309 b.Aux = nil 42310 b.swapSuccessors() 42311 return true 42312 } 42313 // match: (UGE (FlagLT_UGT) yes no) 42314 // cond: 42315 // result: (First nil yes no) 42316 for { 42317 v := b.Control 42318 if v.Op != OpARM64FlagLT_UGT { 42319 break 42320 } 42321 b.Kind = BlockFirst 42322 b.SetControl(nil) 42323 b.Aux = nil 42324 return true 42325 } 42326 // match: (UGE (FlagGT_ULT) yes no) 42327 // cond: 42328 // result: (First nil no yes) 42329 for { 42330 v := b.Control 42331 if v.Op != OpARM64FlagGT_ULT { 42332 break 42333 } 42334 b.Kind = BlockFirst 42335 b.SetControl(nil) 42336 b.Aux = nil 42337 b.swapSuccessors() 42338 return true 42339 } 42340 // match: (UGE (FlagGT_UGT) yes no) 42341 // cond: 42342 // result: (First nil yes no) 42343 for { 42344 v := b.Control 42345 if v.Op != OpARM64FlagGT_UGT { 42346 break 42347 } 42348 b.Kind = BlockFirst 42349 b.SetControl(nil) 42350 b.Aux = nil 42351 return true 42352 } 42353 // match: (UGE (InvertFlags cmp) yes no) 
42354 // cond: 42355 // result: (ULE cmp yes no) 42356 for { 42357 v := b.Control 42358 if v.Op != OpARM64InvertFlags { 42359 break 42360 } 42361 cmp := v.Args[0] 42362 b.Kind = BlockARM64ULE 42363 b.SetControl(cmp) 42364 b.Aux = nil 42365 return true 42366 } 42367 case BlockARM64UGT: 42368 // match: (UGT (FlagEQ) yes no) 42369 // cond: 42370 // result: (First nil no yes) 42371 for { 42372 v := b.Control 42373 if v.Op != OpARM64FlagEQ { 42374 break 42375 } 42376 b.Kind = BlockFirst 42377 b.SetControl(nil) 42378 b.Aux = nil 42379 b.swapSuccessors() 42380 return true 42381 } 42382 // match: (UGT (FlagLT_ULT) yes no) 42383 // cond: 42384 // result: (First nil no yes) 42385 for { 42386 v := b.Control 42387 if v.Op != OpARM64FlagLT_ULT { 42388 break 42389 } 42390 b.Kind = BlockFirst 42391 b.SetControl(nil) 42392 b.Aux = nil 42393 b.swapSuccessors() 42394 return true 42395 } 42396 // match: (UGT (FlagLT_UGT) yes no) 42397 // cond: 42398 // result: (First nil yes no) 42399 for { 42400 v := b.Control 42401 if v.Op != OpARM64FlagLT_UGT { 42402 break 42403 } 42404 b.Kind = BlockFirst 42405 b.SetControl(nil) 42406 b.Aux = nil 42407 return true 42408 } 42409 // match: (UGT (FlagGT_ULT) yes no) 42410 // cond: 42411 // result: (First nil no yes) 42412 for { 42413 v := b.Control 42414 if v.Op != OpARM64FlagGT_ULT { 42415 break 42416 } 42417 b.Kind = BlockFirst 42418 b.SetControl(nil) 42419 b.Aux = nil 42420 b.swapSuccessors() 42421 return true 42422 } 42423 // match: (UGT (FlagGT_UGT) yes no) 42424 // cond: 42425 // result: (First nil yes no) 42426 for { 42427 v := b.Control 42428 if v.Op != OpARM64FlagGT_UGT { 42429 break 42430 } 42431 b.Kind = BlockFirst 42432 b.SetControl(nil) 42433 b.Aux = nil 42434 return true 42435 } 42436 // match: (UGT (InvertFlags cmp) yes no) 42437 // cond: 42438 // result: (ULT cmp yes no) 42439 for { 42440 v := b.Control 42441 if v.Op != OpARM64InvertFlags { 42442 break 42443 } 42444 cmp := v.Args[0] 42445 b.Kind = BlockARM64ULT 42446 
b.SetControl(cmp) 42447 b.Aux = nil 42448 return true 42449 } 42450 case BlockARM64ULE: 42451 // match: (ULE (FlagEQ) yes no) 42452 // cond: 42453 // result: (First nil yes no) 42454 for { 42455 v := b.Control 42456 if v.Op != OpARM64FlagEQ { 42457 break 42458 } 42459 b.Kind = BlockFirst 42460 b.SetControl(nil) 42461 b.Aux = nil 42462 return true 42463 } 42464 // match: (ULE (FlagLT_ULT) yes no) 42465 // cond: 42466 // result: (First nil yes no) 42467 for { 42468 v := b.Control 42469 if v.Op != OpARM64FlagLT_ULT { 42470 break 42471 } 42472 b.Kind = BlockFirst 42473 b.SetControl(nil) 42474 b.Aux = nil 42475 return true 42476 } 42477 // match: (ULE (FlagLT_UGT) yes no) 42478 // cond: 42479 // result: (First nil no yes) 42480 for { 42481 v := b.Control 42482 if v.Op != OpARM64FlagLT_UGT { 42483 break 42484 } 42485 b.Kind = BlockFirst 42486 b.SetControl(nil) 42487 b.Aux = nil 42488 b.swapSuccessors() 42489 return true 42490 } 42491 // match: (ULE (FlagGT_ULT) yes no) 42492 // cond: 42493 // result: (First nil yes no) 42494 for { 42495 v := b.Control 42496 if v.Op != OpARM64FlagGT_ULT { 42497 break 42498 } 42499 b.Kind = BlockFirst 42500 b.SetControl(nil) 42501 b.Aux = nil 42502 return true 42503 } 42504 // match: (ULE (FlagGT_UGT) yes no) 42505 // cond: 42506 // result: (First nil no yes) 42507 for { 42508 v := b.Control 42509 if v.Op != OpARM64FlagGT_UGT { 42510 break 42511 } 42512 b.Kind = BlockFirst 42513 b.SetControl(nil) 42514 b.Aux = nil 42515 b.swapSuccessors() 42516 return true 42517 } 42518 // match: (ULE (InvertFlags cmp) yes no) 42519 // cond: 42520 // result: (UGE cmp yes no) 42521 for { 42522 v := b.Control 42523 if v.Op != OpARM64InvertFlags { 42524 break 42525 } 42526 cmp := v.Args[0] 42527 b.Kind = BlockARM64UGE 42528 b.SetControl(cmp) 42529 b.Aux = nil 42530 return true 42531 } 42532 case BlockARM64ULT: 42533 // match: (ULT (FlagEQ) yes no) 42534 // cond: 42535 // result: (First nil no yes) 42536 for { 42537 v := b.Control 42538 if v.Op != 
OpARM64FlagEQ { 42539 break 42540 } 42541 b.Kind = BlockFirst 42542 b.SetControl(nil) 42543 b.Aux = nil 42544 b.swapSuccessors() 42545 return true 42546 } 42547 // match: (ULT (FlagLT_ULT) yes no) 42548 // cond: 42549 // result: (First nil yes no) 42550 for { 42551 v := b.Control 42552 if v.Op != OpARM64FlagLT_ULT { 42553 break 42554 } 42555 b.Kind = BlockFirst 42556 b.SetControl(nil) 42557 b.Aux = nil 42558 return true 42559 } 42560 // match: (ULT (FlagLT_UGT) yes no) 42561 // cond: 42562 // result: (First nil no yes) 42563 for { 42564 v := b.Control 42565 if v.Op != OpARM64FlagLT_UGT { 42566 break 42567 } 42568 b.Kind = BlockFirst 42569 b.SetControl(nil) 42570 b.Aux = nil 42571 b.swapSuccessors() 42572 return true 42573 } 42574 // match: (ULT (FlagGT_ULT) yes no) 42575 // cond: 42576 // result: (First nil yes no) 42577 for { 42578 v := b.Control 42579 if v.Op != OpARM64FlagGT_ULT { 42580 break 42581 } 42582 b.Kind = BlockFirst 42583 b.SetControl(nil) 42584 b.Aux = nil 42585 return true 42586 } 42587 // match: (ULT (FlagGT_UGT) yes no) 42588 // cond: 42589 // result: (First nil no yes) 42590 for { 42591 v := b.Control 42592 if v.Op != OpARM64FlagGT_UGT { 42593 break 42594 } 42595 b.Kind = BlockFirst 42596 b.SetControl(nil) 42597 b.Aux = nil 42598 b.swapSuccessors() 42599 return true 42600 } 42601 // match: (ULT (InvertFlags cmp) yes no) 42602 // cond: 42603 // result: (UGT cmp yes no) 42604 for { 42605 v := b.Control 42606 if v.Op != OpARM64InvertFlags { 42607 break 42608 } 42609 cmp := v.Args[0] 42610 b.Kind = BlockARM64UGT 42611 b.SetControl(cmp) 42612 b.Aux = nil 42613 return true 42614 } 42615 case BlockARM64Z: 42616 // match: (Z (ANDconst [c] x) yes no) 42617 // cond: oneBit(c) 42618 // result: (TBZ {ntz(c)} x yes no) 42619 for { 42620 v := b.Control 42621 if v.Op != OpARM64ANDconst { 42622 break 42623 } 42624 c := v.AuxInt 42625 x := v.Args[0] 42626 if !(oneBit(c)) { 42627 break 42628 } 42629 b.Kind = BlockARM64TBZ 42630 b.SetControl(x) 42631 b.Aux = ntz(c) 
42632 return true 42633 } 42634 // match: (Z (MOVDconst [0]) yes no) 42635 // cond: 42636 // result: (First nil yes no) 42637 for { 42638 v := b.Control 42639 if v.Op != OpARM64MOVDconst { 42640 break 42641 } 42642 if v.AuxInt != 0 { 42643 break 42644 } 42645 b.Kind = BlockFirst 42646 b.SetControl(nil) 42647 b.Aux = nil 42648 return true 42649 } 42650 // match: (Z (MOVDconst [c]) yes no) 42651 // cond: c != 0 42652 // result: (First nil no yes) 42653 for { 42654 v := b.Control 42655 if v.Op != OpARM64MOVDconst { 42656 break 42657 } 42658 c := v.AuxInt 42659 if !(c != 0) { 42660 break 42661 } 42662 b.Kind = BlockFirst 42663 b.SetControl(nil) 42664 b.Aux = nil 42665 b.swapSuccessors() 42666 return true 42667 } 42668 case BlockARM64ZW: 42669 // match: (ZW (ANDconst [c] x) yes no) 42670 // cond: oneBit(int64(uint32(c))) 42671 // result: (TBZ {ntz(int64(uint32(c)))} x yes no) 42672 for { 42673 v := b.Control 42674 if v.Op != OpARM64ANDconst { 42675 break 42676 } 42677 c := v.AuxInt 42678 x := v.Args[0] 42679 if !(oneBit(int64(uint32(c)))) { 42680 break 42681 } 42682 b.Kind = BlockARM64TBZ 42683 b.SetControl(x) 42684 b.Aux = ntz(int64(uint32(c))) 42685 return true 42686 } 42687 // match: (ZW (MOVDconst [c]) yes no) 42688 // cond: int32(c) == 0 42689 // result: (First nil yes no) 42690 for { 42691 v := b.Control 42692 if v.Op != OpARM64MOVDconst { 42693 break 42694 } 42695 c := v.AuxInt 42696 if !(int32(c) == 0) { 42697 break 42698 } 42699 b.Kind = BlockFirst 42700 b.SetControl(nil) 42701 b.Aux = nil 42702 return true 42703 } 42704 // match: (ZW (MOVDconst [c]) yes no) 42705 // cond: int32(c) != 0 42706 // result: (First nil no yes) 42707 for { 42708 v := b.Control 42709 if v.Op != OpARM64MOVDconst { 42710 break 42711 } 42712 c := v.AuxInt 42713 if !(int32(c) != 0) { 42714 break 42715 } 42716 b.Kind = BlockFirst 42717 b.SetControl(nil) 42718 b.Aux = nil 42719 b.swapSuccessors() 42720 return true 42721 } 42722 } 42723 return false 42724 }