// Code generated from gen/MIPS64.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go

package ssa

import "math"
import "cmd/internal/obj"
import "cmd/internal/objabi"
import "cmd/compile/internal/types"

var _ = math.MinInt8  // in case not otherwise used
var _ = obj.ANOP      // in case not otherwise used
var _ = objabi.GOROOT // in case not otherwise used
var _ = types.TypeMem // in case not otherwise used

// rewriteValueMIPS64 dispatches v to the generated rewrite helper for its
// opcode and reports whether any rewrite rule fired. Ops with more than ten
// rules chain several helpers with || (e.g. _0 || _10), so the first helper
// that rewrites v short-circuits the rest.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueMIPS64_OpAtomicAdd32_0(v)
	case OpAtomicAdd64:
		return rewriteValueMIPS64_OpAtomicAdd64_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicCompareAndSwap64:
		return rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v)
	case OpAtomicExchange32:
		return rewriteValueMIPS64_OpAtomicExchange32_0(v)
	case OpAtomicExchange64:
		return rewriteValueMIPS64_OpAtomicExchange64_0(v)
	case OpAtomicLoad32:
		return rewriteValueMIPS64_OpAtomicLoad32_0(v)
	case OpAtomicLoad64:
		return rewriteValueMIPS64_OpAtomicLoad64_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS64_OpAtomicLoadPtr_0(v)
	case OpAtomicStore32:
		return rewriteValueMIPS64_OpAtomicStore32_0(v)
	case OpAtomicStore64:
		return rewriteValueMIPS64_OpAtomicStore64_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetCallerPC:
		return rewriteValueMIPS64_OpGetCallerPC_0(v)
	case OpGetCallerSP:
		return rewriteValueMIPS64_OpGetCallerSP_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLocalAddr:
		return rewriteValueMIPS64_OpLocalAddr_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64LoweredAtomicAdd32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v)
	case OpMIPS64LoweredAtomicAdd64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v)
	case OpMIPS64LoweredAtomicStore32:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v)
	case OpMIPS64LoweredAtomicStore64:
		return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		// MOVWreg has more than ten rules, so the helpers are chained.
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValueMIPS64_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpWB:
		return rewriteValueMIPS64_OpWB_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	return false
}

// Narrow integer adds are lowered to the machine ADDV op (Add16/32/8 all
// map to ADDV below).
func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (ADDF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDF)
		// (continuation of rewriteValueMIPS64_OpAdd32F_0, begun in the
		// previous span: finish wiring the ADDF operands.)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
	// match: (Add64 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (ADDD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVVaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
// All widths of And (and boolean AndB) lower to the single machine AND op.
func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAnd32_0(v *Value) bool {
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAnd64_0(v *Value) bool {
	// match: (And64 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAnd8_0(v *Value) bool {
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAndB_0(v *Value) bool {
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicAdd32_0(v *Value) bool {
	// match: (AtomicAdd32 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicAdd32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicAdd64_0(v *Value) bool {
	// match: (AtomicAdd64 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicAdd64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v *Value) bool {
	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas32 ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpMIPS64LoweredAtomicCas32)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v *Value) bool {
	// match: (AtomicCompareAndSwap64 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas64 ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpMIPS64LoweredAtomicCas64)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicExchange32_0(v *Value) bool {
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicExchange32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicExchange64_0(v *Value) bool {
	// match: (AtomicExchange64 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicExchange64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad32 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad32)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicLoad64_0(v *Value) bool {
	// match: (AtomicLoad64 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// Pointer-sized atomic loads/stores reuse the 64-bit lowered ops below.
func rewriteValueMIPS64_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LoweredAtomicLoad64 ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64LoweredAtomicLoad64)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore32 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore32)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicStore64_0(v *Value) bool {
	// match: (AtomicStore64 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (LoweredAtomicStore64 ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredAtomicStore64)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg64u <t> x y)
	// cond:
	// result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64CALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
// ComN (bitwise complement) is expressed as NOR with a zero constant:
// NOR(0, x). The same lowering is used for every width.
func rewriteValueMIPS64_OpCom16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com16 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS64_OpCom32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com32 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS64_OpCom64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com64 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS64_OpCom8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Com8 x)
	// cond:
	// result: (NOR (MOVVconst [0]) x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64NOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
v.AddArg(x) 1096 return true 1097 } 1098 } 1099 func rewriteValueMIPS64_OpConst16_0(v *Value) bool { 1100 // match: (Const16 [val]) 1101 // cond: 1102 // result: (MOVVconst [val]) 1103 for { 1104 val := v.AuxInt 1105 v.reset(OpMIPS64MOVVconst) 1106 v.AuxInt = val 1107 return true 1108 } 1109 } 1110 func rewriteValueMIPS64_OpConst32_0(v *Value) bool { 1111 // match: (Const32 [val]) 1112 // cond: 1113 // result: (MOVVconst [val]) 1114 for { 1115 val := v.AuxInt 1116 v.reset(OpMIPS64MOVVconst) 1117 v.AuxInt = val 1118 return true 1119 } 1120 } 1121 func rewriteValueMIPS64_OpConst32F_0(v *Value) bool { 1122 // match: (Const32F [val]) 1123 // cond: 1124 // result: (MOVFconst [val]) 1125 for { 1126 val := v.AuxInt 1127 v.reset(OpMIPS64MOVFconst) 1128 v.AuxInt = val 1129 return true 1130 } 1131 } 1132 func rewriteValueMIPS64_OpConst64_0(v *Value) bool { 1133 // match: (Const64 [val]) 1134 // cond: 1135 // result: (MOVVconst [val]) 1136 for { 1137 val := v.AuxInt 1138 v.reset(OpMIPS64MOVVconst) 1139 v.AuxInt = val 1140 return true 1141 } 1142 } 1143 func rewriteValueMIPS64_OpConst64F_0(v *Value) bool { 1144 // match: (Const64F [val]) 1145 // cond: 1146 // result: (MOVDconst [val]) 1147 for { 1148 val := v.AuxInt 1149 v.reset(OpMIPS64MOVDconst) 1150 v.AuxInt = val 1151 return true 1152 } 1153 } 1154 func rewriteValueMIPS64_OpConst8_0(v *Value) bool { 1155 // match: (Const8 [val]) 1156 // cond: 1157 // result: (MOVVconst [val]) 1158 for { 1159 val := v.AuxInt 1160 v.reset(OpMIPS64MOVVconst) 1161 v.AuxInt = val 1162 return true 1163 } 1164 } 1165 func rewriteValueMIPS64_OpConstBool_0(v *Value) bool { 1166 // match: (ConstBool [b]) 1167 // cond: 1168 // result: (MOVVconst [b]) 1169 for { 1170 b := v.AuxInt 1171 v.reset(OpMIPS64MOVVconst) 1172 v.AuxInt = b 1173 return true 1174 } 1175 } 1176 func rewriteValueMIPS64_OpConstNil_0(v *Value) bool { 1177 // match: (ConstNil) 1178 // cond: 1179 // result: (MOVVconst [0]) 1180 for { 1181 v.reset(OpMIPS64MOVVconst) 1182 v.AuxInt = 0 
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto32_0 lowers the float32 -> int32
// conversion: (Cvt32Fto32 x) => (TRUNCFW x).
func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64_0 lowers float32 -> int64:
// (Cvt32Fto64 x) => (TRUNCFV x).
func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool {
	// match: (Cvt32Fto64 x)
	// cond:
	// result: (TRUNCFV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCFV)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32Fto64F_0 lowers float32 -> float64 widening:
// (Cvt32Fto64F x) => (MOVFD x).
func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVFD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to32F_0 lowers int32 -> float32:
// (Cvt32to32F x) => (MOVWF x).
func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt32to64F_0 lowers int32 -> float64:
// (Cvt32to64F x) => (MOVWD x).
func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32_0 lowers float64 -> int32:
// (Cvt64Fto32 x) => (TRUNCDW x).
func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDW)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto32F_0 lowers float64 -> float32 narrowing:
// (Cvt64Fto32F x) => (MOVDF x).
func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVDF)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpCvt64Fto64_0 lowers float64 -> int64:
// (Cvt64Fto64 x) => (TRUNCDV x).
func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool {
	// match: (Cvt64Fto64 x)
	// cond:
	// result: (TRUNCDV x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64TRUNCDV)
		v.AddArg(x)
		return true
	}
}
func
rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool { 1275 // match: (Cvt64to32F x) 1276 // cond: 1277 // result: (MOVVF x) 1278 for { 1279 x := v.Args[0] 1280 v.reset(OpMIPS64MOVVF) 1281 v.AddArg(x) 1282 return true 1283 } 1284 } 1285 func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool { 1286 // match: (Cvt64to64F x) 1287 // cond: 1288 // result: (MOVVD x) 1289 for { 1290 x := v.Args[0] 1291 v.reset(OpMIPS64MOVVD) 1292 v.AddArg(x) 1293 return true 1294 } 1295 } 1296 func rewriteValueMIPS64_OpDiv16_0(v *Value) bool { 1297 b := v.Block 1298 _ = b 1299 typ := &b.Func.Config.Types 1300 _ = typ 1301 // match: (Div16 x y) 1302 // cond: 1303 // result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 1304 for { 1305 _ = v.Args[1] 1306 x := v.Args[0] 1307 y := v.Args[1] 1308 v.reset(OpSelect1) 1309 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1310 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1311 v1.AddArg(x) 1312 v0.AddArg(v1) 1313 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1314 v2.AddArg(y) 1315 v0.AddArg(v2) 1316 v.AddArg(v0) 1317 return true 1318 } 1319 } 1320 func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool { 1321 b := v.Block 1322 _ = b 1323 typ := &b.Func.Config.Types 1324 _ = typ 1325 // match: (Div16u x y) 1326 // cond: 1327 // result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 1328 for { 1329 _ = v.Args[1] 1330 x := v.Args[0] 1331 y := v.Args[1] 1332 v.reset(OpSelect1) 1333 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1334 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1335 v1.AddArg(x) 1336 v0.AddArg(v1) 1337 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1338 v2.AddArg(y) 1339 v0.AddArg(v2) 1340 v.AddArg(v0) 1341 return true 1342 } 1343 } 1344 func rewriteValueMIPS64_OpDiv32_0(v *Value) bool { 1345 b := v.Block 1346 _ = b 1347 typ := &b.Func.Config.Types 1348 _ = typ 1349 // match: (Div32 x y) 1350 // cond: 1351 // result: (Select1 (DIVV 
(SignExt32to64 x) (SignExt32to64 y))) 1352 for { 1353 _ = v.Args[1] 1354 x := v.Args[0] 1355 y := v.Args[1] 1356 v.reset(OpSelect1) 1357 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1358 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1359 v1.AddArg(x) 1360 v0.AddArg(v1) 1361 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1362 v2.AddArg(y) 1363 v0.AddArg(v2) 1364 v.AddArg(v0) 1365 return true 1366 } 1367 } 1368 func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool { 1369 // match: (Div32F x y) 1370 // cond: 1371 // result: (DIVF x y) 1372 for { 1373 _ = v.Args[1] 1374 x := v.Args[0] 1375 y := v.Args[1] 1376 v.reset(OpMIPS64DIVF) 1377 v.AddArg(x) 1378 v.AddArg(y) 1379 return true 1380 } 1381 } 1382 func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool { 1383 b := v.Block 1384 _ = b 1385 typ := &b.Func.Config.Types 1386 _ = typ 1387 // match: (Div32u x y) 1388 // cond: 1389 // result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 1390 for { 1391 _ = v.Args[1] 1392 x := v.Args[0] 1393 y := v.Args[1] 1394 v.reset(OpSelect1) 1395 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1396 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1397 v1.AddArg(x) 1398 v0.AddArg(v1) 1399 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1400 v2.AddArg(y) 1401 v0.AddArg(v2) 1402 v.AddArg(v0) 1403 return true 1404 } 1405 } 1406 func rewriteValueMIPS64_OpDiv64_0(v *Value) bool { 1407 b := v.Block 1408 _ = b 1409 typ := &b.Func.Config.Types 1410 _ = typ 1411 // match: (Div64 x y) 1412 // cond: 1413 // result: (Select1 (DIVV x y)) 1414 for { 1415 _ = v.Args[1] 1416 x := v.Args[0] 1417 y := v.Args[1] 1418 v.reset(OpSelect1) 1419 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1420 v0.AddArg(x) 1421 v0.AddArg(y) 1422 v.AddArg(v0) 1423 return true 1424 } 1425 } 1426 func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool { 1427 // match: (Div64F x y) 1428 // cond: 1429 // result: (DIVD x 
y) 1430 for { 1431 _ = v.Args[1] 1432 x := v.Args[0] 1433 y := v.Args[1] 1434 v.reset(OpMIPS64DIVD) 1435 v.AddArg(x) 1436 v.AddArg(y) 1437 return true 1438 } 1439 } 1440 func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool { 1441 b := v.Block 1442 _ = b 1443 typ := &b.Func.Config.Types 1444 _ = typ 1445 // match: (Div64u x y) 1446 // cond: 1447 // result: (Select1 (DIVVU x y)) 1448 for { 1449 _ = v.Args[1] 1450 x := v.Args[0] 1451 y := v.Args[1] 1452 v.reset(OpSelect1) 1453 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1454 v0.AddArg(x) 1455 v0.AddArg(y) 1456 v.AddArg(v0) 1457 return true 1458 } 1459 } 1460 func rewriteValueMIPS64_OpDiv8_0(v *Value) bool { 1461 b := v.Block 1462 _ = b 1463 typ := &b.Func.Config.Types 1464 _ = typ 1465 // match: (Div8 x y) 1466 // cond: 1467 // result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 1468 for { 1469 _ = v.Args[1] 1470 x := v.Args[0] 1471 y := v.Args[1] 1472 v.reset(OpSelect1) 1473 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1474 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1475 v1.AddArg(x) 1476 v0.AddArg(v1) 1477 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1478 v2.AddArg(y) 1479 v0.AddArg(v2) 1480 v.AddArg(v0) 1481 return true 1482 } 1483 } 1484 func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool { 1485 b := v.Block 1486 _ = b 1487 typ := &b.Func.Config.Types 1488 _ = typ 1489 // match: (Div8u x y) 1490 // cond: 1491 // result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 1492 for { 1493 _ = v.Args[1] 1494 x := v.Args[0] 1495 y := v.Args[1] 1496 v.reset(OpSelect1) 1497 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1498 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1499 v1.AddArg(x) 1500 v0.AddArg(v1) 1501 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1502 v2.AddArg(y) 1503 v0.AddArg(v2) 1504 v.AddArg(v0) 1505 return true 1506 } 1507 } 1508 func rewriteValueMIPS64_OpEq16_0(v *Value) 
bool { 1509 b := v.Block 1510 _ = b 1511 typ := &b.Func.Config.Types 1512 _ = typ 1513 // match: (Eq16 x y) 1514 // cond: 1515 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))) 1516 for { 1517 _ = v.Args[1] 1518 x := v.Args[0] 1519 y := v.Args[1] 1520 v.reset(OpMIPS64SGTU) 1521 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1522 v0.AuxInt = 1 1523 v.AddArg(v0) 1524 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1525 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1526 v2.AddArg(x) 1527 v1.AddArg(v2) 1528 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1529 v3.AddArg(y) 1530 v1.AddArg(v3) 1531 v.AddArg(v1) 1532 return true 1533 } 1534 } 1535 func rewriteValueMIPS64_OpEq32_0(v *Value) bool { 1536 b := v.Block 1537 _ = b 1538 typ := &b.Func.Config.Types 1539 _ = typ 1540 // match: (Eq32 x y) 1541 // cond: 1542 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))) 1543 for { 1544 _ = v.Args[1] 1545 x := v.Args[0] 1546 y := v.Args[1] 1547 v.reset(OpMIPS64SGTU) 1548 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1549 v0.AuxInt = 1 1550 v.AddArg(v0) 1551 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1552 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1553 v2.AddArg(x) 1554 v1.AddArg(v2) 1555 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1556 v3.AddArg(y) 1557 v1.AddArg(v3) 1558 v.AddArg(v1) 1559 return true 1560 } 1561 } 1562 func rewriteValueMIPS64_OpEq32F_0(v *Value) bool { 1563 b := v.Block 1564 _ = b 1565 // match: (Eq32F x y) 1566 // cond: 1567 // result: (FPFlagTrue (CMPEQF x y)) 1568 for { 1569 _ = v.Args[1] 1570 x := v.Args[0] 1571 y := v.Args[1] 1572 v.reset(OpMIPS64FPFlagTrue) 1573 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags) 1574 v0.AddArg(x) 1575 v0.AddArg(y) 1576 v.AddArg(v0) 1577 return true 1578 } 1579 } 1580 func rewriteValueMIPS64_OpEq64_0(v *Value) bool { 1581 b := v.Block 1582 _ = b 1583 typ := &b.Func.Config.Types 1584 _ = typ 1585 // match: 
(Eq64 x y) 1586 // cond: 1587 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1588 for { 1589 _ = v.Args[1] 1590 x := v.Args[0] 1591 y := v.Args[1] 1592 v.reset(OpMIPS64SGTU) 1593 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1594 v0.AuxInt = 1 1595 v.AddArg(v0) 1596 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1597 v1.AddArg(x) 1598 v1.AddArg(y) 1599 v.AddArg(v1) 1600 return true 1601 } 1602 } 1603 func rewriteValueMIPS64_OpEq64F_0(v *Value) bool { 1604 b := v.Block 1605 _ = b 1606 // match: (Eq64F x y) 1607 // cond: 1608 // result: (FPFlagTrue (CMPEQD x y)) 1609 for { 1610 _ = v.Args[1] 1611 x := v.Args[0] 1612 y := v.Args[1] 1613 v.reset(OpMIPS64FPFlagTrue) 1614 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags) 1615 v0.AddArg(x) 1616 v0.AddArg(y) 1617 v.AddArg(v0) 1618 return true 1619 } 1620 } 1621 func rewriteValueMIPS64_OpEq8_0(v *Value) bool { 1622 b := v.Block 1623 _ = b 1624 typ := &b.Func.Config.Types 1625 _ = typ 1626 // match: (Eq8 x y) 1627 // cond: 1628 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))) 1629 for { 1630 _ = v.Args[1] 1631 x := v.Args[0] 1632 y := v.Args[1] 1633 v.reset(OpMIPS64SGTU) 1634 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1635 v0.AuxInt = 1 1636 v.AddArg(v0) 1637 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1638 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1639 v2.AddArg(x) 1640 v1.AddArg(v2) 1641 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1642 v3.AddArg(y) 1643 v1.AddArg(v3) 1644 v.AddArg(v1) 1645 return true 1646 } 1647 } 1648 func rewriteValueMIPS64_OpEqB_0(v *Value) bool { 1649 b := v.Block 1650 _ = b 1651 typ := &b.Func.Config.Types 1652 _ = typ 1653 // match: (EqB x y) 1654 // cond: 1655 // result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y)) 1656 for { 1657 _ = v.Args[1] 1658 x := v.Args[0] 1659 y := v.Args[1] 1660 v.reset(OpMIPS64XOR) 1661 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1662 v0.AuxInt = 1 1663 v.AddArg(v0) 1664 v1 := 
b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool) 1665 v1.AddArg(x) 1666 v1.AddArg(y) 1667 v.AddArg(v1) 1668 return true 1669 } 1670 } 1671 func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool { 1672 b := v.Block 1673 _ = b 1674 typ := &b.Func.Config.Types 1675 _ = typ 1676 // match: (EqPtr x y) 1677 // cond: 1678 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1679 for { 1680 _ = v.Args[1] 1681 x := v.Args[0] 1682 y := v.Args[1] 1683 v.reset(OpMIPS64SGTU) 1684 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1685 v0.AuxInt = 1 1686 v.AddArg(v0) 1687 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1688 v1.AddArg(x) 1689 v1.AddArg(y) 1690 v.AddArg(v1) 1691 return true 1692 } 1693 } 1694 func rewriteValueMIPS64_OpGeq16_0(v *Value) bool { 1695 b := v.Block 1696 _ = b 1697 typ := &b.Func.Config.Types 1698 _ = typ 1699 // match: (Geq16 x y) 1700 // cond: 1701 // result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x))) 1702 for { 1703 _ = v.Args[1] 1704 x := v.Args[0] 1705 y := v.Args[1] 1706 v.reset(OpMIPS64XOR) 1707 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1708 v0.AuxInt = 1 1709 v.AddArg(v0) 1710 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1711 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1712 v2.AddArg(y) 1713 v1.AddArg(v2) 1714 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1715 v3.AddArg(x) 1716 v1.AddArg(v3) 1717 v.AddArg(v1) 1718 return true 1719 } 1720 } 1721 func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool { 1722 b := v.Block 1723 _ = b 1724 typ := &b.Func.Config.Types 1725 _ = typ 1726 // match: (Geq16U x y) 1727 // cond: 1728 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))) 1729 for { 1730 _ = v.Args[1] 1731 x := v.Args[0] 1732 y := v.Args[1] 1733 v.reset(OpMIPS64XOR) 1734 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1735 v0.AuxInt = 1 1736 v.AddArg(v0) 1737 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1738 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1739 
v2.AddArg(y) 1740 v1.AddArg(v2) 1741 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1742 v3.AddArg(x) 1743 v1.AddArg(v3) 1744 v.AddArg(v1) 1745 return true 1746 } 1747 } 1748 func rewriteValueMIPS64_OpGeq32_0(v *Value) bool { 1749 b := v.Block 1750 _ = b 1751 typ := &b.Func.Config.Types 1752 _ = typ 1753 // match: (Geq32 x y) 1754 // cond: 1755 // result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x))) 1756 for { 1757 _ = v.Args[1] 1758 x := v.Args[0] 1759 y := v.Args[1] 1760 v.reset(OpMIPS64XOR) 1761 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1762 v0.AuxInt = 1 1763 v.AddArg(v0) 1764 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1765 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1766 v2.AddArg(y) 1767 v1.AddArg(v2) 1768 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1769 v3.AddArg(x) 1770 v1.AddArg(v3) 1771 v.AddArg(v1) 1772 return true 1773 } 1774 } 1775 func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool { 1776 b := v.Block 1777 _ = b 1778 // match: (Geq32F x y) 1779 // cond: 1780 // result: (FPFlagTrue (CMPGEF x y)) 1781 for { 1782 _ = v.Args[1] 1783 x := v.Args[0] 1784 y := v.Args[1] 1785 v.reset(OpMIPS64FPFlagTrue) 1786 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags) 1787 v0.AddArg(x) 1788 v0.AddArg(y) 1789 v.AddArg(v0) 1790 return true 1791 } 1792 } 1793 func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool { 1794 b := v.Block 1795 _ = b 1796 typ := &b.Func.Config.Types 1797 _ = typ 1798 // match: (Geq32U x y) 1799 // cond: 1800 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))) 1801 for { 1802 _ = v.Args[1] 1803 x := v.Args[0] 1804 y := v.Args[1] 1805 v.reset(OpMIPS64XOR) 1806 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1807 v0.AuxInt = 1 1808 v.AddArg(v0) 1809 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1810 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1811 v2.AddArg(y) 1812 v1.AddArg(v2) 1813 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1814 
v3.AddArg(x) 1815 v1.AddArg(v3) 1816 v.AddArg(v1) 1817 return true 1818 } 1819 } 1820 func rewriteValueMIPS64_OpGeq64_0(v *Value) bool { 1821 b := v.Block 1822 _ = b 1823 typ := &b.Func.Config.Types 1824 _ = typ 1825 // match: (Geq64 x y) 1826 // cond: 1827 // result: (XOR (MOVVconst [1]) (SGT y x)) 1828 for { 1829 _ = v.Args[1] 1830 x := v.Args[0] 1831 y := v.Args[1] 1832 v.reset(OpMIPS64XOR) 1833 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1834 v0.AuxInt = 1 1835 v.AddArg(v0) 1836 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1837 v1.AddArg(y) 1838 v1.AddArg(x) 1839 v.AddArg(v1) 1840 return true 1841 } 1842 } 1843 func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool { 1844 b := v.Block 1845 _ = b 1846 // match: (Geq64F x y) 1847 // cond: 1848 // result: (FPFlagTrue (CMPGED x y)) 1849 for { 1850 _ = v.Args[1] 1851 x := v.Args[0] 1852 y := v.Args[1] 1853 v.reset(OpMIPS64FPFlagTrue) 1854 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags) 1855 v0.AddArg(x) 1856 v0.AddArg(y) 1857 v.AddArg(v0) 1858 return true 1859 } 1860 } 1861 func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool { 1862 b := v.Block 1863 _ = b 1864 typ := &b.Func.Config.Types 1865 _ = typ 1866 // match: (Geq64U x y) 1867 // cond: 1868 // result: (XOR (MOVVconst [1]) (SGTU y x)) 1869 for { 1870 _ = v.Args[1] 1871 x := v.Args[0] 1872 y := v.Args[1] 1873 v.reset(OpMIPS64XOR) 1874 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1875 v0.AuxInt = 1 1876 v.AddArg(v0) 1877 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1878 v1.AddArg(y) 1879 v1.AddArg(x) 1880 v.AddArg(v1) 1881 return true 1882 } 1883 } 1884 func rewriteValueMIPS64_OpGeq8_0(v *Value) bool { 1885 b := v.Block 1886 _ = b 1887 typ := &b.Func.Config.Types 1888 _ = typ 1889 // match: (Geq8 x y) 1890 // cond: 1891 // result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x))) 1892 for { 1893 _ = v.Args[1] 1894 x := v.Args[0] 1895 y := v.Args[1] 1896 v.reset(OpMIPS64XOR) 1897 v0 := b.NewValue0(v.Pos, 
OpMIPS64MOVVconst, typ.UInt64) 1898 v0.AuxInt = 1 1899 v.AddArg(v0) 1900 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1901 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1902 v2.AddArg(y) 1903 v1.AddArg(v2) 1904 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1905 v3.AddArg(x) 1906 v1.AddArg(v3) 1907 v.AddArg(v1) 1908 return true 1909 } 1910 } 1911 func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool { 1912 b := v.Block 1913 _ = b 1914 typ := &b.Func.Config.Types 1915 _ = typ 1916 // match: (Geq8U x y) 1917 // cond: 1918 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))) 1919 for { 1920 _ = v.Args[1] 1921 x := v.Args[0] 1922 y := v.Args[1] 1923 v.reset(OpMIPS64XOR) 1924 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1925 v0.AuxInt = 1 1926 v.AddArg(v0) 1927 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1928 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1929 v2.AddArg(y) 1930 v1.AddArg(v2) 1931 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1932 v3.AddArg(x) 1933 v1.AddArg(v3) 1934 v.AddArg(v1) 1935 return true 1936 } 1937 } 1938 func rewriteValueMIPS64_OpGetCallerPC_0(v *Value) bool { 1939 // match: (GetCallerPC) 1940 // cond: 1941 // result: (LoweredGetCallerPC) 1942 for { 1943 v.reset(OpMIPS64LoweredGetCallerPC) 1944 return true 1945 } 1946 } 1947 func rewriteValueMIPS64_OpGetCallerSP_0(v *Value) bool { 1948 // match: (GetCallerSP) 1949 // cond: 1950 // result: (LoweredGetCallerSP) 1951 for { 1952 v.reset(OpMIPS64LoweredGetCallerSP) 1953 return true 1954 } 1955 } 1956 func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool { 1957 // match: (GetClosurePtr) 1958 // cond: 1959 // result: (LoweredGetClosurePtr) 1960 for { 1961 v.reset(OpMIPS64LoweredGetClosurePtr) 1962 return true 1963 } 1964 } 1965 func rewriteValueMIPS64_OpGreater16_0(v *Value) bool { 1966 b := v.Block 1967 _ = b 1968 typ := &b.Func.Config.Types 1969 _ = typ 1970 // match: (Greater16 x y) 1971 // cond: 1972 // result: (SGT (SignExt16to64 x) 
(SignExt16to64 y)) 1973 for { 1974 _ = v.Args[1] 1975 x := v.Args[0] 1976 y := v.Args[1] 1977 v.reset(OpMIPS64SGT) 1978 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1979 v0.AddArg(x) 1980 v.AddArg(v0) 1981 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1982 v1.AddArg(y) 1983 v.AddArg(v1) 1984 return true 1985 } 1986 } 1987 func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool { 1988 b := v.Block 1989 _ = b 1990 typ := &b.Func.Config.Types 1991 _ = typ 1992 // match: (Greater16U x y) 1993 // cond: 1994 // result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)) 1995 for { 1996 _ = v.Args[1] 1997 x := v.Args[0] 1998 y := v.Args[1] 1999 v.reset(OpMIPS64SGTU) 2000 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2001 v0.AddArg(x) 2002 v.AddArg(v0) 2003 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2004 v1.AddArg(y) 2005 v.AddArg(v1) 2006 return true 2007 } 2008 } 2009 func rewriteValueMIPS64_OpGreater32_0(v *Value) bool { 2010 b := v.Block 2011 _ = b 2012 typ := &b.Func.Config.Types 2013 _ = typ 2014 // match: (Greater32 x y) 2015 // cond: 2016 // result: (SGT (SignExt32to64 x) (SignExt32to64 y)) 2017 for { 2018 _ = v.Args[1] 2019 x := v.Args[0] 2020 y := v.Args[1] 2021 v.reset(OpMIPS64SGT) 2022 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2023 v0.AddArg(x) 2024 v.AddArg(v0) 2025 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2026 v1.AddArg(y) 2027 v.AddArg(v1) 2028 return true 2029 } 2030 } 2031 func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool { 2032 b := v.Block 2033 _ = b 2034 // match: (Greater32F x y) 2035 // cond: 2036 // result: (FPFlagTrue (CMPGTF x y)) 2037 for { 2038 _ = v.Args[1] 2039 x := v.Args[0] 2040 y := v.Args[1] 2041 v.reset(OpMIPS64FPFlagTrue) 2042 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags) 2043 v0.AddArg(x) 2044 v0.AddArg(y) 2045 v.AddArg(v0) 2046 return true 2047 } 2048 } 2049 func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool { 2050 b := v.Block 2051 _ = b 2052 typ := &b.Func.Config.Types 
2053 _ = typ 2054 // match: (Greater32U x y) 2055 // cond: 2056 // result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)) 2057 for { 2058 _ = v.Args[1] 2059 x := v.Args[0] 2060 y := v.Args[1] 2061 v.reset(OpMIPS64SGTU) 2062 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2063 v0.AddArg(x) 2064 v.AddArg(v0) 2065 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2066 v1.AddArg(y) 2067 v.AddArg(v1) 2068 return true 2069 } 2070 } 2071 func rewriteValueMIPS64_OpGreater64_0(v *Value) bool { 2072 // match: (Greater64 x y) 2073 // cond: 2074 // result: (SGT x y) 2075 for { 2076 _ = v.Args[1] 2077 x := v.Args[0] 2078 y := v.Args[1] 2079 v.reset(OpMIPS64SGT) 2080 v.AddArg(x) 2081 v.AddArg(y) 2082 return true 2083 } 2084 } 2085 func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool { 2086 b := v.Block 2087 _ = b 2088 // match: (Greater64F x y) 2089 // cond: 2090 // result: (FPFlagTrue (CMPGTD x y)) 2091 for { 2092 _ = v.Args[1] 2093 x := v.Args[0] 2094 y := v.Args[1] 2095 v.reset(OpMIPS64FPFlagTrue) 2096 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags) 2097 v0.AddArg(x) 2098 v0.AddArg(y) 2099 v.AddArg(v0) 2100 return true 2101 } 2102 } 2103 func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool { 2104 // match: (Greater64U x y) 2105 // cond: 2106 // result: (SGTU x y) 2107 for { 2108 _ = v.Args[1] 2109 x := v.Args[0] 2110 y := v.Args[1] 2111 v.reset(OpMIPS64SGTU) 2112 v.AddArg(x) 2113 v.AddArg(y) 2114 return true 2115 } 2116 } 2117 func rewriteValueMIPS64_OpGreater8_0(v *Value) bool { 2118 b := v.Block 2119 _ = b 2120 typ := &b.Func.Config.Types 2121 _ = typ 2122 // match: (Greater8 x y) 2123 // cond: 2124 // result: (SGT (SignExt8to64 x) (SignExt8to64 y)) 2125 for { 2126 _ = v.Args[1] 2127 x := v.Args[0] 2128 y := v.Args[1] 2129 v.reset(OpMIPS64SGT) 2130 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2131 v0.AddArg(x) 2132 v.AddArg(v0) 2133 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2134 v1.AddArg(y) 2135 v.AddArg(v1) 2136 return true 2137 } 2138 } 
2139 func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool { 2140 b := v.Block 2141 _ = b 2142 typ := &b.Func.Config.Types 2143 _ = typ 2144 // match: (Greater8U x y) 2145 // cond: 2146 // result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)) 2147 for { 2148 _ = v.Args[1] 2149 x := v.Args[0] 2150 y := v.Args[1] 2151 v.reset(OpMIPS64SGTU) 2152 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2153 v0.AddArg(x) 2154 v.AddArg(v0) 2155 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2156 v1.AddArg(y) 2157 v.AddArg(v1) 2158 return true 2159 } 2160 } 2161 func rewriteValueMIPS64_OpHmul32_0(v *Value) bool { 2162 b := v.Block 2163 _ = b 2164 typ := &b.Func.Config.Types 2165 _ = typ 2166 // match: (Hmul32 x y) 2167 // cond: 2168 // result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32]) 2169 for { 2170 _ = v.Args[1] 2171 x := v.Args[0] 2172 y := v.Args[1] 2173 v.reset(OpMIPS64SRAVconst) 2174 v.AuxInt = 32 2175 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64) 2176 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64)) 2177 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2178 v2.AddArg(x) 2179 v1.AddArg(v2) 2180 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2181 v3.AddArg(y) 2182 v1.AddArg(v3) 2183 v0.AddArg(v1) 2184 v.AddArg(v0) 2185 return true 2186 } 2187 } 2188 func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool { 2189 b := v.Block 2190 _ = b 2191 typ := &b.Func.Config.Types 2192 _ = typ 2193 // match: (Hmul32u x y) 2194 // cond: 2195 // result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32]) 2196 for { 2197 _ = v.Args[1] 2198 x := v.Args[0] 2199 y := v.Args[1] 2200 v.reset(OpMIPS64SRLVconst) 2201 v.AuxInt = 32 2202 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 2203 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 2204 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2205 v2.AddArg(x) 2206 v1.AddArg(v2) 2207 v3 := b.NewValue0(v.Pos, 
OpZeroExt32to64, typ.UInt64) 2208 v3.AddArg(y) 2209 v1.AddArg(v3) 2210 v0.AddArg(v1) 2211 v.AddArg(v0) 2212 return true 2213 } 2214 } 2215 func rewriteValueMIPS64_OpHmul64_0(v *Value) bool { 2216 b := v.Block 2217 _ = b 2218 typ := &b.Func.Config.Types 2219 _ = typ 2220 // match: (Hmul64 x y) 2221 // cond: 2222 // result: (Select0 (MULV x y)) 2223 for { 2224 _ = v.Args[1] 2225 x := v.Args[0] 2226 y := v.Args[1] 2227 v.reset(OpSelect0) 2228 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64)) 2229 v0.AddArg(x) 2230 v0.AddArg(y) 2231 v.AddArg(v0) 2232 return true 2233 } 2234 } 2235 func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool { 2236 b := v.Block 2237 _ = b 2238 typ := &b.Func.Config.Types 2239 _ = typ 2240 // match: (Hmul64u x y) 2241 // cond: 2242 // result: (Select0 (MULVU x y)) 2243 for { 2244 _ = v.Args[1] 2245 x := v.Args[0] 2246 y := v.Args[1] 2247 v.reset(OpSelect0) 2248 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 2249 v0.AddArg(x) 2250 v0.AddArg(y) 2251 v.AddArg(v0) 2252 return true 2253 } 2254 } 2255 func rewriteValueMIPS64_OpInterCall_0(v *Value) bool { 2256 // match: (InterCall [argwid] entry mem) 2257 // cond: 2258 // result: (CALLinter [argwid] entry mem) 2259 for { 2260 argwid := v.AuxInt 2261 _ = v.Args[1] 2262 entry := v.Args[0] 2263 mem := v.Args[1] 2264 v.reset(OpMIPS64CALLinter) 2265 v.AuxInt = argwid 2266 v.AddArg(entry) 2267 v.AddArg(mem) 2268 return true 2269 } 2270 } 2271 func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool { 2272 // match: (IsInBounds idx len) 2273 // cond: 2274 // result: (SGTU len idx) 2275 for { 2276 _ = v.Args[1] 2277 idx := v.Args[0] 2278 len := v.Args[1] 2279 v.reset(OpMIPS64SGTU) 2280 v.AddArg(len) 2281 v.AddArg(idx) 2282 return true 2283 } 2284 } 2285 func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool { 2286 b := v.Block 2287 _ = b 2288 typ := &b.Func.Config.Types 2289 _ = typ 2290 // match: (IsNonNil ptr) 2291 // cond: 2292 // result: (SGTU ptr 
(MOVVconst [0])) 2293 for { 2294 ptr := v.Args[0] 2295 v.reset(OpMIPS64SGTU) 2296 v.AddArg(ptr) 2297 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2298 v0.AuxInt = 0 2299 v.AddArg(v0) 2300 return true 2301 } 2302 } 2303 func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool { 2304 b := v.Block 2305 _ = b 2306 typ := &b.Func.Config.Types 2307 _ = typ 2308 // match: (IsSliceInBounds idx len) 2309 // cond: 2310 // result: (XOR (MOVVconst [1]) (SGTU idx len)) 2311 for { 2312 _ = v.Args[1] 2313 idx := v.Args[0] 2314 len := v.Args[1] 2315 v.reset(OpMIPS64XOR) 2316 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2317 v0.AuxInt = 1 2318 v.AddArg(v0) 2319 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2320 v1.AddArg(idx) 2321 v1.AddArg(len) 2322 v.AddArg(v1) 2323 return true 2324 } 2325 } 2326 func rewriteValueMIPS64_OpLeq16_0(v *Value) bool { 2327 b := v.Block 2328 _ = b 2329 typ := &b.Func.Config.Types 2330 _ = typ 2331 // match: (Leq16 x y) 2332 // cond: 2333 // result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y))) 2334 for { 2335 _ = v.Args[1] 2336 x := v.Args[0] 2337 y := v.Args[1] 2338 v.reset(OpMIPS64XOR) 2339 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2340 v0.AuxInt = 1 2341 v.AddArg(v0) 2342 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2343 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2344 v2.AddArg(x) 2345 v1.AddArg(v2) 2346 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2347 v3.AddArg(y) 2348 v1.AddArg(v3) 2349 v.AddArg(v1) 2350 return true 2351 } 2352 } 2353 func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool { 2354 b := v.Block 2355 _ = b 2356 typ := &b.Func.Config.Types 2357 _ = typ 2358 // match: (Leq16U x y) 2359 // cond: 2360 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))) 2361 for { 2362 _ = v.Args[1] 2363 x := v.Args[0] 2364 y := v.Args[1] 2365 v.reset(OpMIPS64XOR) 2366 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2367 v0.AuxInt = 1 2368 
v.AddArg(v0) 2369 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2370 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2371 v2.AddArg(x) 2372 v1.AddArg(v2) 2373 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2374 v3.AddArg(y) 2375 v1.AddArg(v3) 2376 v.AddArg(v1) 2377 return true 2378 } 2379 } 2380 func rewriteValueMIPS64_OpLeq32_0(v *Value) bool { 2381 b := v.Block 2382 _ = b 2383 typ := &b.Func.Config.Types 2384 _ = typ 2385 // match: (Leq32 x y) 2386 // cond: 2387 // result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y))) 2388 for { 2389 _ = v.Args[1] 2390 x := v.Args[0] 2391 y := v.Args[1] 2392 v.reset(OpMIPS64XOR) 2393 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2394 v0.AuxInt = 1 2395 v.AddArg(v0) 2396 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2397 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2398 v2.AddArg(x) 2399 v1.AddArg(v2) 2400 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2401 v3.AddArg(y) 2402 v1.AddArg(v3) 2403 v.AddArg(v1) 2404 return true 2405 } 2406 } 2407 func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool { 2408 b := v.Block 2409 _ = b 2410 // match: (Leq32F x y) 2411 // cond: 2412 // result: (FPFlagTrue (CMPGEF y x)) 2413 for { 2414 _ = v.Args[1] 2415 x := v.Args[0] 2416 y := v.Args[1] 2417 v.reset(OpMIPS64FPFlagTrue) 2418 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags) 2419 v0.AddArg(y) 2420 v0.AddArg(x) 2421 v.AddArg(v0) 2422 return true 2423 } 2424 } 2425 func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool { 2426 b := v.Block 2427 _ = b 2428 typ := &b.Func.Config.Types 2429 _ = typ 2430 // match: (Leq32U x y) 2431 // cond: 2432 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))) 2433 for { 2434 _ = v.Args[1] 2435 x := v.Args[0] 2436 y := v.Args[1] 2437 v.reset(OpMIPS64XOR) 2438 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2439 v0.AuxInt = 1 2440 v.AddArg(v0) 2441 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2442 v2 := b.NewValue0(v.Pos, 
OpZeroExt32to64, typ.UInt64) 2443 v2.AddArg(x) 2444 v1.AddArg(v2) 2445 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2446 v3.AddArg(y) 2447 v1.AddArg(v3) 2448 v.AddArg(v1) 2449 return true 2450 } 2451 } 2452 func rewriteValueMIPS64_OpLeq64_0(v *Value) bool { 2453 b := v.Block 2454 _ = b 2455 typ := &b.Func.Config.Types 2456 _ = typ 2457 // match: (Leq64 x y) 2458 // cond: 2459 // result: (XOR (MOVVconst [1]) (SGT x y)) 2460 for { 2461 _ = v.Args[1] 2462 x := v.Args[0] 2463 y := v.Args[1] 2464 v.reset(OpMIPS64XOR) 2465 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2466 v0.AuxInt = 1 2467 v.AddArg(v0) 2468 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2469 v1.AddArg(x) 2470 v1.AddArg(y) 2471 v.AddArg(v1) 2472 return true 2473 } 2474 } 2475 func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool { 2476 b := v.Block 2477 _ = b 2478 // match: (Leq64F x y) 2479 // cond: 2480 // result: (FPFlagTrue (CMPGED y x)) 2481 for { 2482 _ = v.Args[1] 2483 x := v.Args[0] 2484 y := v.Args[1] 2485 v.reset(OpMIPS64FPFlagTrue) 2486 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags) 2487 v0.AddArg(y) 2488 v0.AddArg(x) 2489 v.AddArg(v0) 2490 return true 2491 } 2492 } 2493 func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool { 2494 b := v.Block 2495 _ = b 2496 typ := &b.Func.Config.Types 2497 _ = typ 2498 // match: (Leq64U x y) 2499 // cond: 2500 // result: (XOR (MOVVconst [1]) (SGTU x y)) 2501 for { 2502 _ = v.Args[1] 2503 x := v.Args[0] 2504 y := v.Args[1] 2505 v.reset(OpMIPS64XOR) 2506 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2507 v0.AuxInt = 1 2508 v.AddArg(v0) 2509 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2510 v1.AddArg(x) 2511 v1.AddArg(y) 2512 v.AddArg(v1) 2513 return true 2514 } 2515 } 2516 func rewriteValueMIPS64_OpLeq8_0(v *Value) bool { 2517 b := v.Block 2518 _ = b 2519 typ := &b.Func.Config.Types 2520 _ = typ 2521 // match: (Leq8 x y) 2522 // cond: 2523 // result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y))) 2524 for { 
2525 _ = v.Args[1] 2526 x := v.Args[0] 2527 y := v.Args[1] 2528 v.reset(OpMIPS64XOR) 2529 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2530 v0.AuxInt = 1 2531 v.AddArg(v0) 2532 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2533 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2534 v2.AddArg(x) 2535 v1.AddArg(v2) 2536 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2537 v3.AddArg(y) 2538 v1.AddArg(v3) 2539 v.AddArg(v1) 2540 return true 2541 } 2542 } 2543 func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool { 2544 b := v.Block 2545 _ = b 2546 typ := &b.Func.Config.Types 2547 _ = typ 2548 // match: (Leq8U x y) 2549 // cond: 2550 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))) 2551 for { 2552 _ = v.Args[1] 2553 x := v.Args[0] 2554 y := v.Args[1] 2555 v.reset(OpMIPS64XOR) 2556 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2557 v0.AuxInt = 1 2558 v.AddArg(v0) 2559 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2560 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2561 v2.AddArg(x) 2562 v1.AddArg(v2) 2563 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2564 v3.AddArg(y) 2565 v1.AddArg(v3) 2566 v.AddArg(v1) 2567 return true 2568 } 2569 } 2570 func rewriteValueMIPS64_OpLess16_0(v *Value) bool { 2571 b := v.Block 2572 _ = b 2573 typ := &b.Func.Config.Types 2574 _ = typ 2575 // match: (Less16 x y) 2576 // cond: 2577 // result: (SGT (SignExt16to64 y) (SignExt16to64 x)) 2578 for { 2579 _ = v.Args[1] 2580 x := v.Args[0] 2581 y := v.Args[1] 2582 v.reset(OpMIPS64SGT) 2583 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2584 v0.AddArg(y) 2585 v.AddArg(v0) 2586 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2587 v1.AddArg(x) 2588 v.AddArg(v1) 2589 return true 2590 } 2591 } 2592 func rewriteValueMIPS64_OpLess16U_0(v *Value) bool { 2593 b := v.Block 2594 _ = b 2595 typ := &b.Func.Config.Types 2596 _ = typ 2597 // match: (Less16U x y) 2598 // cond: 2599 // result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)) 2600 
for { 2601 _ = v.Args[1] 2602 x := v.Args[0] 2603 y := v.Args[1] 2604 v.reset(OpMIPS64SGTU) 2605 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2606 v0.AddArg(y) 2607 v.AddArg(v0) 2608 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2609 v1.AddArg(x) 2610 v.AddArg(v1) 2611 return true 2612 } 2613 } 2614 func rewriteValueMIPS64_OpLess32_0(v *Value) bool { 2615 b := v.Block 2616 _ = b 2617 typ := &b.Func.Config.Types 2618 _ = typ 2619 // match: (Less32 x y) 2620 // cond: 2621 // result: (SGT (SignExt32to64 y) (SignExt32to64 x)) 2622 for { 2623 _ = v.Args[1] 2624 x := v.Args[0] 2625 y := v.Args[1] 2626 v.reset(OpMIPS64SGT) 2627 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2628 v0.AddArg(y) 2629 v.AddArg(v0) 2630 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2631 v1.AddArg(x) 2632 v.AddArg(v1) 2633 return true 2634 } 2635 } 2636 func rewriteValueMIPS64_OpLess32F_0(v *Value) bool { 2637 b := v.Block 2638 _ = b 2639 // match: (Less32F x y) 2640 // cond: 2641 // result: (FPFlagTrue (CMPGTF y x)) 2642 for { 2643 _ = v.Args[1] 2644 x := v.Args[0] 2645 y := v.Args[1] 2646 v.reset(OpMIPS64FPFlagTrue) 2647 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags) 2648 v0.AddArg(y) 2649 v0.AddArg(x) 2650 v.AddArg(v0) 2651 return true 2652 } 2653 } 2654 func rewriteValueMIPS64_OpLess32U_0(v *Value) bool { 2655 b := v.Block 2656 _ = b 2657 typ := &b.Func.Config.Types 2658 _ = typ 2659 // match: (Less32U x y) 2660 // cond: 2661 // result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)) 2662 for { 2663 _ = v.Args[1] 2664 x := v.Args[0] 2665 y := v.Args[1] 2666 v.reset(OpMIPS64SGTU) 2667 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2668 v0.AddArg(y) 2669 v.AddArg(v0) 2670 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2671 v1.AddArg(x) 2672 v.AddArg(v1) 2673 return true 2674 } 2675 } 2676 func rewriteValueMIPS64_OpLess64_0(v *Value) bool { 2677 // match: (Less64 x y) 2678 // cond: 2679 // result: (SGT y x) 2680 for { 2681 _ = v.Args[1] 2682 x 
:= v.Args[0] 2683 y := v.Args[1] 2684 v.reset(OpMIPS64SGT) 2685 v.AddArg(y) 2686 v.AddArg(x) 2687 return true 2688 } 2689 } 2690 func rewriteValueMIPS64_OpLess64F_0(v *Value) bool { 2691 b := v.Block 2692 _ = b 2693 // match: (Less64F x y) 2694 // cond: 2695 // result: (FPFlagTrue (CMPGTD y x)) 2696 for { 2697 _ = v.Args[1] 2698 x := v.Args[0] 2699 y := v.Args[1] 2700 v.reset(OpMIPS64FPFlagTrue) 2701 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags) 2702 v0.AddArg(y) 2703 v0.AddArg(x) 2704 v.AddArg(v0) 2705 return true 2706 } 2707 } 2708 func rewriteValueMIPS64_OpLess64U_0(v *Value) bool { 2709 // match: (Less64U x y) 2710 // cond: 2711 // result: (SGTU y x) 2712 for { 2713 _ = v.Args[1] 2714 x := v.Args[0] 2715 y := v.Args[1] 2716 v.reset(OpMIPS64SGTU) 2717 v.AddArg(y) 2718 v.AddArg(x) 2719 return true 2720 } 2721 } 2722 func rewriteValueMIPS64_OpLess8_0(v *Value) bool { 2723 b := v.Block 2724 _ = b 2725 typ := &b.Func.Config.Types 2726 _ = typ 2727 // match: (Less8 x y) 2728 // cond: 2729 // result: (SGT (SignExt8to64 y) (SignExt8to64 x)) 2730 for { 2731 _ = v.Args[1] 2732 x := v.Args[0] 2733 y := v.Args[1] 2734 v.reset(OpMIPS64SGT) 2735 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2736 v0.AddArg(y) 2737 v.AddArg(v0) 2738 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2739 v1.AddArg(x) 2740 v.AddArg(v1) 2741 return true 2742 } 2743 } 2744 func rewriteValueMIPS64_OpLess8U_0(v *Value) bool { 2745 b := v.Block 2746 _ = b 2747 typ := &b.Func.Config.Types 2748 _ = typ 2749 // match: (Less8U x y) 2750 // cond: 2751 // result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)) 2752 for { 2753 _ = v.Args[1] 2754 x := v.Args[0] 2755 y := v.Args[1] 2756 v.reset(OpMIPS64SGTU) 2757 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2758 v0.AddArg(y) 2759 v.AddArg(v0) 2760 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2761 v1.AddArg(x) 2762 v.AddArg(v1) 2763 return true 2764 } 2765 } 2766 func rewriteValueMIPS64_OpLoad_0(v *Value) bool { 2767 // match: 
(Load <t> ptr mem) 2768 // cond: t.IsBoolean() 2769 // result: (MOVBUload ptr mem) 2770 for { 2771 t := v.Type 2772 _ = v.Args[1] 2773 ptr := v.Args[0] 2774 mem := v.Args[1] 2775 if !(t.IsBoolean()) { 2776 break 2777 } 2778 v.reset(OpMIPS64MOVBUload) 2779 v.AddArg(ptr) 2780 v.AddArg(mem) 2781 return true 2782 } 2783 // match: (Load <t> ptr mem) 2784 // cond: (is8BitInt(t) && isSigned(t)) 2785 // result: (MOVBload ptr mem) 2786 for { 2787 t := v.Type 2788 _ = v.Args[1] 2789 ptr := v.Args[0] 2790 mem := v.Args[1] 2791 if !(is8BitInt(t) && isSigned(t)) { 2792 break 2793 } 2794 v.reset(OpMIPS64MOVBload) 2795 v.AddArg(ptr) 2796 v.AddArg(mem) 2797 return true 2798 } 2799 // match: (Load <t> ptr mem) 2800 // cond: (is8BitInt(t) && !isSigned(t)) 2801 // result: (MOVBUload ptr mem) 2802 for { 2803 t := v.Type 2804 _ = v.Args[1] 2805 ptr := v.Args[0] 2806 mem := v.Args[1] 2807 if !(is8BitInt(t) && !isSigned(t)) { 2808 break 2809 } 2810 v.reset(OpMIPS64MOVBUload) 2811 v.AddArg(ptr) 2812 v.AddArg(mem) 2813 return true 2814 } 2815 // match: (Load <t> ptr mem) 2816 // cond: (is16BitInt(t) && isSigned(t)) 2817 // result: (MOVHload ptr mem) 2818 for { 2819 t := v.Type 2820 _ = v.Args[1] 2821 ptr := v.Args[0] 2822 mem := v.Args[1] 2823 if !(is16BitInt(t) && isSigned(t)) { 2824 break 2825 } 2826 v.reset(OpMIPS64MOVHload) 2827 v.AddArg(ptr) 2828 v.AddArg(mem) 2829 return true 2830 } 2831 // match: (Load <t> ptr mem) 2832 // cond: (is16BitInt(t) && !isSigned(t)) 2833 // result: (MOVHUload ptr mem) 2834 for { 2835 t := v.Type 2836 _ = v.Args[1] 2837 ptr := v.Args[0] 2838 mem := v.Args[1] 2839 if !(is16BitInt(t) && !isSigned(t)) { 2840 break 2841 } 2842 v.reset(OpMIPS64MOVHUload) 2843 v.AddArg(ptr) 2844 v.AddArg(mem) 2845 return true 2846 } 2847 // match: (Load <t> ptr mem) 2848 // cond: (is32BitInt(t) && isSigned(t)) 2849 // result: (MOVWload ptr mem) 2850 for { 2851 t := v.Type 2852 _ = v.Args[1] 2853 ptr := v.Args[0] 2854 mem := v.Args[1] 2855 if !(is32BitInt(t) && isSigned(t)) { 
2856 break 2857 } 2858 v.reset(OpMIPS64MOVWload) 2859 v.AddArg(ptr) 2860 v.AddArg(mem) 2861 return true 2862 } 2863 // match: (Load <t> ptr mem) 2864 // cond: (is32BitInt(t) && !isSigned(t)) 2865 // result: (MOVWUload ptr mem) 2866 for { 2867 t := v.Type 2868 _ = v.Args[1] 2869 ptr := v.Args[0] 2870 mem := v.Args[1] 2871 if !(is32BitInt(t) && !isSigned(t)) { 2872 break 2873 } 2874 v.reset(OpMIPS64MOVWUload) 2875 v.AddArg(ptr) 2876 v.AddArg(mem) 2877 return true 2878 } 2879 // match: (Load <t> ptr mem) 2880 // cond: (is64BitInt(t) || isPtr(t)) 2881 // result: (MOVVload ptr mem) 2882 for { 2883 t := v.Type 2884 _ = v.Args[1] 2885 ptr := v.Args[0] 2886 mem := v.Args[1] 2887 if !(is64BitInt(t) || isPtr(t)) { 2888 break 2889 } 2890 v.reset(OpMIPS64MOVVload) 2891 v.AddArg(ptr) 2892 v.AddArg(mem) 2893 return true 2894 } 2895 // match: (Load <t> ptr mem) 2896 // cond: is32BitFloat(t) 2897 // result: (MOVFload ptr mem) 2898 for { 2899 t := v.Type 2900 _ = v.Args[1] 2901 ptr := v.Args[0] 2902 mem := v.Args[1] 2903 if !(is32BitFloat(t)) { 2904 break 2905 } 2906 v.reset(OpMIPS64MOVFload) 2907 v.AddArg(ptr) 2908 v.AddArg(mem) 2909 return true 2910 } 2911 // match: (Load <t> ptr mem) 2912 // cond: is64BitFloat(t) 2913 // result: (MOVDload ptr mem) 2914 for { 2915 t := v.Type 2916 _ = v.Args[1] 2917 ptr := v.Args[0] 2918 mem := v.Args[1] 2919 if !(is64BitFloat(t)) { 2920 break 2921 } 2922 v.reset(OpMIPS64MOVDload) 2923 v.AddArg(ptr) 2924 v.AddArg(mem) 2925 return true 2926 } 2927 return false 2928 } 2929 func rewriteValueMIPS64_OpLocalAddr_0(v *Value) bool { 2930 // match: (LocalAddr {sym} base _) 2931 // cond: 2932 // result: (MOVVaddr {sym} base) 2933 for { 2934 sym := v.Aux 2935 _ = v.Args[1] 2936 base := v.Args[0] 2937 v.reset(OpMIPS64MOVVaddr) 2938 v.Aux = sym 2939 v.AddArg(base) 2940 return true 2941 } 2942 } 2943 func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool { 2944 b := v.Block 2945 _ = b 2946 typ := &b.Func.Config.Types 2947 _ = typ 2948 // match: (Lsh16x16 <t> x y) 
2949 // cond: 2950 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 2951 for { 2952 t := v.Type 2953 _ = v.Args[1] 2954 x := v.Args[0] 2955 y := v.Args[1] 2956 v.reset(OpMIPS64AND) 2957 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2958 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2959 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2960 v2.AuxInt = 64 2961 v1.AddArg(v2) 2962 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2963 v3.AddArg(y) 2964 v1.AddArg(v3) 2965 v0.AddArg(v1) 2966 v.AddArg(v0) 2967 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2968 v4.AddArg(x) 2969 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2970 v5.AddArg(y) 2971 v4.AddArg(v5) 2972 v.AddArg(v4) 2973 return true 2974 } 2975 } 2976 func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool { 2977 b := v.Block 2978 _ = b 2979 typ := &b.Func.Config.Types 2980 _ = typ 2981 // match: (Lsh16x32 <t> x y) 2982 // cond: 2983 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 2984 for { 2985 t := v.Type 2986 _ = v.Args[1] 2987 x := v.Args[0] 2988 y := v.Args[1] 2989 v.reset(OpMIPS64AND) 2990 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2991 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2992 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2993 v2.AuxInt = 64 2994 v1.AddArg(v2) 2995 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2996 v3.AddArg(y) 2997 v1.AddArg(v3) 2998 v0.AddArg(v1) 2999 v.AddArg(v0) 3000 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3001 v4.AddArg(x) 3002 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3003 v5.AddArg(y) 3004 v4.AddArg(v5) 3005 v.AddArg(v4) 3006 return true 3007 } 3008 } 3009 func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool { 3010 b := v.Block 3011 _ = b 3012 typ := &b.Func.Config.Types 3013 _ = typ 3014 // match: (Lsh16x64 <t> x y) 3015 // cond: 3016 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) 
y)) (SLLV <t> x y)) 3017 for { 3018 t := v.Type 3019 _ = v.Args[1] 3020 x := v.Args[0] 3021 y := v.Args[1] 3022 v.reset(OpMIPS64AND) 3023 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3024 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3025 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3026 v2.AuxInt = 64 3027 v1.AddArg(v2) 3028 v1.AddArg(y) 3029 v0.AddArg(v1) 3030 v.AddArg(v0) 3031 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3032 v3.AddArg(x) 3033 v3.AddArg(y) 3034 v.AddArg(v3) 3035 return true 3036 } 3037 } 3038 func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool { 3039 b := v.Block 3040 _ = b 3041 typ := &b.Func.Config.Types 3042 _ = typ 3043 // match: (Lsh16x8 <t> x y) 3044 // cond: 3045 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3046 for { 3047 t := v.Type 3048 _ = v.Args[1] 3049 x := v.Args[0] 3050 y := v.Args[1] 3051 v.reset(OpMIPS64AND) 3052 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3053 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3054 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3055 v2.AuxInt = 64 3056 v1.AddArg(v2) 3057 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3058 v3.AddArg(y) 3059 v1.AddArg(v3) 3060 v0.AddArg(v1) 3061 v.AddArg(v0) 3062 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3063 v4.AddArg(x) 3064 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3065 v5.AddArg(y) 3066 v4.AddArg(v5) 3067 v.AddArg(v4) 3068 return true 3069 } 3070 } 3071 func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool { 3072 b := v.Block 3073 _ = b 3074 typ := &b.Func.Config.Types 3075 _ = typ 3076 // match: (Lsh32x16 <t> x y) 3077 // cond: 3078 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3079 for { 3080 t := v.Type 3081 _ = v.Args[1] 3082 x := v.Args[0] 3083 y := v.Args[1] 3084 v.reset(OpMIPS64AND) 3085 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3086 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3087 v2 := 
b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3088 v2.AuxInt = 64 3089 v1.AddArg(v2) 3090 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3091 v3.AddArg(y) 3092 v1.AddArg(v3) 3093 v0.AddArg(v1) 3094 v.AddArg(v0) 3095 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3096 v4.AddArg(x) 3097 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3098 v5.AddArg(y) 3099 v4.AddArg(v5) 3100 v.AddArg(v4) 3101 return true 3102 } 3103 } 3104 func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool { 3105 b := v.Block 3106 _ = b 3107 typ := &b.Func.Config.Types 3108 _ = typ 3109 // match: (Lsh32x32 <t> x y) 3110 // cond: 3111 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3112 for { 3113 t := v.Type 3114 _ = v.Args[1] 3115 x := v.Args[0] 3116 y := v.Args[1] 3117 v.reset(OpMIPS64AND) 3118 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3119 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3120 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3121 v2.AuxInt = 64 3122 v1.AddArg(v2) 3123 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3124 v3.AddArg(y) 3125 v1.AddArg(v3) 3126 v0.AddArg(v1) 3127 v.AddArg(v0) 3128 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3129 v4.AddArg(x) 3130 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3131 v5.AddArg(y) 3132 v4.AddArg(v5) 3133 v.AddArg(v4) 3134 return true 3135 } 3136 } 3137 func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool { 3138 b := v.Block 3139 _ = b 3140 typ := &b.Func.Config.Types 3141 _ = typ 3142 // match: (Lsh32x64 <t> x y) 3143 // cond: 3144 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3145 for { 3146 t := v.Type 3147 _ = v.Args[1] 3148 x := v.Args[0] 3149 y := v.Args[1] 3150 v.reset(OpMIPS64AND) 3151 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3152 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3153 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3154 v2.AuxInt = 64 3155 v1.AddArg(v2) 3156 v1.AddArg(y) 3157 
v0.AddArg(v1) 3158 v.AddArg(v0) 3159 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3160 v3.AddArg(x) 3161 v3.AddArg(y) 3162 v.AddArg(v3) 3163 return true 3164 } 3165 } 3166 func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool { 3167 b := v.Block 3168 _ = b 3169 typ := &b.Func.Config.Types 3170 _ = typ 3171 // match: (Lsh32x8 <t> x y) 3172 // cond: 3173 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3174 for { 3175 t := v.Type 3176 _ = v.Args[1] 3177 x := v.Args[0] 3178 y := v.Args[1] 3179 v.reset(OpMIPS64AND) 3180 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3181 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3182 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3183 v2.AuxInt = 64 3184 v1.AddArg(v2) 3185 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3186 v3.AddArg(y) 3187 v1.AddArg(v3) 3188 v0.AddArg(v1) 3189 v.AddArg(v0) 3190 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3191 v4.AddArg(x) 3192 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3193 v5.AddArg(y) 3194 v4.AddArg(v5) 3195 v.AddArg(v4) 3196 return true 3197 } 3198 } 3199 func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool { 3200 b := v.Block 3201 _ = b 3202 typ := &b.Func.Config.Types 3203 _ = typ 3204 // match: (Lsh64x16 <t> x y) 3205 // cond: 3206 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3207 for { 3208 t := v.Type 3209 _ = v.Args[1] 3210 x := v.Args[0] 3211 y := v.Args[1] 3212 v.reset(OpMIPS64AND) 3213 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3214 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3215 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3216 v2.AuxInt = 64 3217 v1.AddArg(v2) 3218 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3219 v3.AddArg(y) 3220 v1.AddArg(v3) 3221 v0.AddArg(v1) 3222 v.AddArg(v0) 3223 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3224 v4.AddArg(x) 3225 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3226 
v5.AddArg(y) 3227 v4.AddArg(v5) 3228 v.AddArg(v4) 3229 return true 3230 } 3231 } 3232 func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool { 3233 b := v.Block 3234 _ = b 3235 typ := &b.Func.Config.Types 3236 _ = typ 3237 // match: (Lsh64x32 <t> x y) 3238 // cond: 3239 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3240 for { 3241 t := v.Type 3242 _ = v.Args[1] 3243 x := v.Args[0] 3244 y := v.Args[1] 3245 v.reset(OpMIPS64AND) 3246 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3247 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3248 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3249 v2.AuxInt = 64 3250 v1.AddArg(v2) 3251 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3252 v3.AddArg(y) 3253 v1.AddArg(v3) 3254 v0.AddArg(v1) 3255 v.AddArg(v0) 3256 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3257 v4.AddArg(x) 3258 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3259 v5.AddArg(y) 3260 v4.AddArg(v5) 3261 v.AddArg(v4) 3262 return true 3263 } 3264 } 3265 func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool { 3266 b := v.Block 3267 _ = b 3268 typ := &b.Func.Config.Types 3269 _ = typ 3270 // match: (Lsh64x64 <t> x y) 3271 // cond: 3272 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3273 for { 3274 t := v.Type 3275 _ = v.Args[1] 3276 x := v.Args[0] 3277 y := v.Args[1] 3278 v.reset(OpMIPS64AND) 3279 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3280 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3281 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3282 v2.AuxInt = 64 3283 v1.AddArg(v2) 3284 v1.AddArg(y) 3285 v0.AddArg(v1) 3286 v.AddArg(v0) 3287 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3288 v3.AddArg(x) 3289 v3.AddArg(y) 3290 v.AddArg(v3) 3291 return true 3292 } 3293 } 3294 func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool { 3295 b := v.Block 3296 _ = b 3297 typ := &b.Func.Config.Types 3298 _ = typ 3299 // match: (Lsh64x8 <t> x y) 3300 // cond: 3301 // 
result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3302 for { 3303 t := v.Type 3304 _ = v.Args[1] 3305 x := v.Args[0] 3306 y := v.Args[1] 3307 v.reset(OpMIPS64AND) 3308 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3309 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3310 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3311 v2.AuxInt = 64 3312 v1.AddArg(v2) 3313 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3314 v3.AddArg(y) 3315 v1.AddArg(v3) 3316 v0.AddArg(v1) 3317 v.AddArg(v0) 3318 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3319 v4.AddArg(x) 3320 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3321 v5.AddArg(y) 3322 v4.AddArg(v5) 3323 v.AddArg(v4) 3324 return true 3325 } 3326 } 3327 func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool { 3328 b := v.Block 3329 _ = b 3330 typ := &b.Func.Config.Types 3331 _ = typ 3332 // match: (Lsh8x16 <t> x y) 3333 // cond: 3334 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3335 for { 3336 t := v.Type 3337 _ = v.Args[1] 3338 x := v.Args[0] 3339 y := v.Args[1] 3340 v.reset(OpMIPS64AND) 3341 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3342 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3343 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3344 v2.AuxInt = 64 3345 v1.AddArg(v2) 3346 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3347 v3.AddArg(y) 3348 v1.AddArg(v3) 3349 v0.AddArg(v1) 3350 v.AddArg(v0) 3351 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3352 v4.AddArg(x) 3353 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3354 v5.AddArg(y) 3355 v4.AddArg(v5) 3356 v.AddArg(v4) 3357 return true 3358 } 3359 } 3360 func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool { 3361 b := v.Block 3362 _ = b 3363 typ := &b.Func.Config.Types 3364 _ = typ 3365 // match: (Lsh8x32 <t> x y) 3366 // cond: 3367 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x 
(ZeroExt32to64 y))) 3368 for { 3369 t := v.Type 3370 _ = v.Args[1] 3371 x := v.Args[0] 3372 y := v.Args[1] 3373 v.reset(OpMIPS64AND) 3374 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3375 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3376 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3377 v2.AuxInt = 64 3378 v1.AddArg(v2) 3379 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3380 v3.AddArg(y) 3381 v1.AddArg(v3) 3382 v0.AddArg(v1) 3383 v.AddArg(v0) 3384 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3385 v4.AddArg(x) 3386 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3387 v5.AddArg(y) 3388 v4.AddArg(v5) 3389 v.AddArg(v4) 3390 return true 3391 } 3392 } 3393 func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool { 3394 b := v.Block 3395 _ = b 3396 typ := &b.Func.Config.Types 3397 _ = typ 3398 // match: (Lsh8x64 <t> x y) 3399 // cond: 3400 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3401 for { 3402 t := v.Type 3403 _ = v.Args[1] 3404 x := v.Args[0] 3405 y := v.Args[1] 3406 v.reset(OpMIPS64AND) 3407 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3408 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3409 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3410 v2.AuxInt = 64 3411 v1.AddArg(v2) 3412 v1.AddArg(y) 3413 v0.AddArg(v1) 3414 v.AddArg(v0) 3415 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3416 v3.AddArg(x) 3417 v3.AddArg(y) 3418 v.AddArg(v3) 3419 return true 3420 } 3421 } 3422 func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool { 3423 b := v.Block 3424 _ = b 3425 typ := &b.Func.Config.Types 3426 _ = typ 3427 // match: (Lsh8x8 <t> x y) 3428 // cond: 3429 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3430 for { 3431 t := v.Type 3432 _ = v.Args[1] 3433 x := v.Args[0] 3434 y := v.Args[1] 3435 v.reset(OpMIPS64AND) 3436 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3437 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3438 v2 := b.NewValue0(v.Pos, 
OpMIPS64MOVVconst, typ.UInt64) 3439 v2.AuxInt = 64 3440 v1.AddArg(v2) 3441 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3442 v3.AddArg(y) 3443 v1.AddArg(v3) 3444 v0.AddArg(v1) 3445 v.AddArg(v0) 3446 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3447 v4.AddArg(x) 3448 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3449 v5.AddArg(y) 3450 v4.AddArg(v5) 3451 v.AddArg(v4) 3452 return true 3453 } 3454 } 3455 func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool { 3456 // match: (ADDV x (MOVVconst [c])) 3457 // cond: is32Bit(c) 3458 // result: (ADDVconst [c] x) 3459 for { 3460 _ = v.Args[1] 3461 x := v.Args[0] 3462 v_1 := v.Args[1] 3463 if v_1.Op != OpMIPS64MOVVconst { 3464 break 3465 } 3466 c := v_1.AuxInt 3467 if !(is32Bit(c)) { 3468 break 3469 } 3470 v.reset(OpMIPS64ADDVconst) 3471 v.AuxInt = c 3472 v.AddArg(x) 3473 return true 3474 } 3475 // match: (ADDV (MOVVconst [c]) x) 3476 // cond: is32Bit(c) 3477 // result: (ADDVconst [c] x) 3478 for { 3479 _ = v.Args[1] 3480 v_0 := v.Args[0] 3481 if v_0.Op != OpMIPS64MOVVconst { 3482 break 3483 } 3484 c := v_0.AuxInt 3485 x := v.Args[1] 3486 if !(is32Bit(c)) { 3487 break 3488 } 3489 v.reset(OpMIPS64ADDVconst) 3490 v.AuxInt = c 3491 v.AddArg(x) 3492 return true 3493 } 3494 // match: (ADDV x (NEGV y)) 3495 // cond: 3496 // result: (SUBV x y) 3497 for { 3498 _ = v.Args[1] 3499 x := v.Args[0] 3500 v_1 := v.Args[1] 3501 if v_1.Op != OpMIPS64NEGV { 3502 break 3503 } 3504 y := v_1.Args[0] 3505 v.reset(OpMIPS64SUBV) 3506 v.AddArg(x) 3507 v.AddArg(y) 3508 return true 3509 } 3510 // match: (ADDV (NEGV y) x) 3511 // cond: 3512 // result: (SUBV x y) 3513 for { 3514 _ = v.Args[1] 3515 v_0 := v.Args[0] 3516 if v_0.Op != OpMIPS64NEGV { 3517 break 3518 } 3519 y := v_0.Args[0] 3520 x := v.Args[1] 3521 v.reset(OpMIPS64SUBV) 3522 v.AddArg(x) 3523 v.AddArg(y) 3524 return true 3525 } 3526 return false 3527 } 3528 func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool { 3529 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr)) 3530 
// cond: 3531 // result: (MOVVaddr [off1+off2] {sym} ptr) 3532 for { 3533 off1 := v.AuxInt 3534 v_0 := v.Args[0] 3535 if v_0.Op != OpMIPS64MOVVaddr { 3536 break 3537 } 3538 off2 := v_0.AuxInt 3539 sym := v_0.Aux 3540 ptr := v_0.Args[0] 3541 v.reset(OpMIPS64MOVVaddr) 3542 v.AuxInt = off1 + off2 3543 v.Aux = sym 3544 v.AddArg(ptr) 3545 return true 3546 } 3547 // match: (ADDVconst [0] x) 3548 // cond: 3549 // result: x 3550 for { 3551 if v.AuxInt != 0 { 3552 break 3553 } 3554 x := v.Args[0] 3555 v.reset(OpCopy) 3556 v.Type = x.Type 3557 v.AddArg(x) 3558 return true 3559 } 3560 // match: (ADDVconst [c] (MOVVconst [d])) 3561 // cond: 3562 // result: (MOVVconst [c+d]) 3563 for { 3564 c := v.AuxInt 3565 v_0 := v.Args[0] 3566 if v_0.Op != OpMIPS64MOVVconst { 3567 break 3568 } 3569 d := v_0.AuxInt 3570 v.reset(OpMIPS64MOVVconst) 3571 v.AuxInt = c + d 3572 return true 3573 } 3574 // match: (ADDVconst [c] (ADDVconst [d] x)) 3575 // cond: is32Bit(c+d) 3576 // result: (ADDVconst [c+d] x) 3577 for { 3578 c := v.AuxInt 3579 v_0 := v.Args[0] 3580 if v_0.Op != OpMIPS64ADDVconst { 3581 break 3582 } 3583 d := v_0.AuxInt 3584 x := v_0.Args[0] 3585 if !(is32Bit(c + d)) { 3586 break 3587 } 3588 v.reset(OpMIPS64ADDVconst) 3589 v.AuxInt = c + d 3590 v.AddArg(x) 3591 return true 3592 } 3593 // match: (ADDVconst [c] (SUBVconst [d] x)) 3594 // cond: is32Bit(c-d) 3595 // result: (ADDVconst [c-d] x) 3596 for { 3597 c := v.AuxInt 3598 v_0 := v.Args[0] 3599 if v_0.Op != OpMIPS64SUBVconst { 3600 break 3601 } 3602 d := v_0.AuxInt 3603 x := v_0.Args[0] 3604 if !(is32Bit(c - d)) { 3605 break 3606 } 3607 v.reset(OpMIPS64ADDVconst) 3608 v.AuxInt = c - d 3609 v.AddArg(x) 3610 return true 3611 } 3612 return false 3613 } 3614 func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool { 3615 // match: (AND x (MOVVconst [c])) 3616 // cond: is32Bit(c) 3617 // result: (ANDconst [c] x) 3618 for { 3619 _ = v.Args[1] 3620 x := v.Args[0] 3621 v_1 := v.Args[1] 3622 if v_1.Op != OpMIPS64MOVVconst { 3623 break 3624 } 
3625 c := v_1.AuxInt 3626 if !(is32Bit(c)) { 3627 break 3628 } 3629 v.reset(OpMIPS64ANDconst) 3630 v.AuxInt = c 3631 v.AddArg(x) 3632 return true 3633 } 3634 // match: (AND (MOVVconst [c]) x) 3635 // cond: is32Bit(c) 3636 // result: (ANDconst [c] x) 3637 for { 3638 _ = v.Args[1] 3639 v_0 := v.Args[0] 3640 if v_0.Op != OpMIPS64MOVVconst { 3641 break 3642 } 3643 c := v_0.AuxInt 3644 x := v.Args[1] 3645 if !(is32Bit(c)) { 3646 break 3647 } 3648 v.reset(OpMIPS64ANDconst) 3649 v.AuxInt = c 3650 v.AddArg(x) 3651 return true 3652 } 3653 // match: (AND x x) 3654 // cond: 3655 // result: x 3656 for { 3657 _ = v.Args[1] 3658 x := v.Args[0] 3659 if x != v.Args[1] { 3660 break 3661 } 3662 v.reset(OpCopy) 3663 v.Type = x.Type 3664 v.AddArg(x) 3665 return true 3666 } 3667 return false 3668 } 3669 func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool { 3670 // match: (ANDconst [0] _) 3671 // cond: 3672 // result: (MOVVconst [0]) 3673 for { 3674 if v.AuxInt != 0 { 3675 break 3676 } 3677 v.reset(OpMIPS64MOVVconst) 3678 v.AuxInt = 0 3679 return true 3680 } 3681 // match: (ANDconst [-1] x) 3682 // cond: 3683 // result: x 3684 for { 3685 if v.AuxInt != -1 { 3686 break 3687 } 3688 x := v.Args[0] 3689 v.reset(OpCopy) 3690 v.Type = x.Type 3691 v.AddArg(x) 3692 return true 3693 } 3694 // match: (ANDconst [c] (MOVVconst [d])) 3695 // cond: 3696 // result: (MOVVconst [c&d]) 3697 for { 3698 c := v.AuxInt 3699 v_0 := v.Args[0] 3700 if v_0.Op != OpMIPS64MOVVconst { 3701 break 3702 } 3703 d := v_0.AuxInt 3704 v.reset(OpMIPS64MOVVconst) 3705 v.AuxInt = c & d 3706 return true 3707 } 3708 // match: (ANDconst [c] (ANDconst [d] x)) 3709 // cond: 3710 // result: (ANDconst [c&d] x) 3711 for { 3712 c := v.AuxInt 3713 v_0 := v.Args[0] 3714 if v_0.Op != OpMIPS64ANDconst { 3715 break 3716 } 3717 d := v_0.AuxInt 3718 x := v_0.Args[0] 3719 v.reset(OpMIPS64ANDconst) 3720 v.AuxInt = c & d 3721 v.AddArg(x) 3722 return true 3723 } 3724 return false 3725 } 3726 func 
rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v *Value) bool { 3727 // match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem) 3728 // cond: is32Bit(c) 3729 // result: (LoweredAtomicAddconst32 [c] ptr mem) 3730 for { 3731 _ = v.Args[2] 3732 ptr := v.Args[0] 3733 v_1 := v.Args[1] 3734 if v_1.Op != OpMIPS64MOVVconst { 3735 break 3736 } 3737 c := v_1.AuxInt 3738 mem := v.Args[2] 3739 if !(is32Bit(c)) { 3740 break 3741 } 3742 v.reset(OpMIPS64LoweredAtomicAddconst32) 3743 v.AuxInt = c 3744 v.AddArg(ptr) 3745 v.AddArg(mem) 3746 return true 3747 } 3748 return false 3749 } 3750 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v *Value) bool { 3751 // match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem) 3752 // cond: is32Bit(c) 3753 // result: (LoweredAtomicAddconst64 [c] ptr mem) 3754 for { 3755 _ = v.Args[2] 3756 ptr := v.Args[0] 3757 v_1 := v.Args[1] 3758 if v_1.Op != OpMIPS64MOVVconst { 3759 break 3760 } 3761 c := v_1.AuxInt 3762 mem := v.Args[2] 3763 if !(is32Bit(c)) { 3764 break 3765 } 3766 v.reset(OpMIPS64LoweredAtomicAddconst64) 3767 v.AuxInt = c 3768 v.AddArg(ptr) 3769 v.AddArg(mem) 3770 return true 3771 } 3772 return false 3773 } 3774 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v *Value) bool { 3775 // match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem) 3776 // cond: 3777 // result: (LoweredAtomicStorezero32 ptr mem) 3778 for { 3779 _ = v.Args[2] 3780 ptr := v.Args[0] 3781 v_1 := v.Args[1] 3782 if v_1.Op != OpMIPS64MOVVconst { 3783 break 3784 } 3785 if v_1.AuxInt != 0 { 3786 break 3787 } 3788 mem := v.Args[2] 3789 v.reset(OpMIPS64LoweredAtomicStorezero32) 3790 v.AddArg(ptr) 3791 v.AddArg(mem) 3792 return true 3793 } 3794 return false 3795 } 3796 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v *Value) bool { 3797 // match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem) 3798 // cond: 3799 // result: (LoweredAtomicStorezero64 ptr mem) 3800 for { 3801 _ = v.Args[2] 3802 ptr := v.Args[0] 3803 v_1 := v.Args[1] 3804 if v_1.Op != OpMIPS64MOVVconst { 
3805 break 3806 } 3807 if v_1.AuxInt != 0 { 3808 break 3809 } 3810 mem := v.Args[2] 3811 v.reset(OpMIPS64LoweredAtomicStorezero64) 3812 v.AddArg(ptr) 3813 v.AddArg(mem) 3814 return true 3815 } 3816 return false 3817 } 3818 func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool { 3819 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem) 3820 // cond: is32Bit(off1+off2) 3821 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3822 for { 3823 off1 := v.AuxInt 3824 sym := v.Aux 3825 _ = v.Args[1] 3826 v_0 := v.Args[0] 3827 if v_0.Op != OpMIPS64ADDVconst { 3828 break 3829 } 3830 off2 := v_0.AuxInt 3831 ptr := v_0.Args[0] 3832 mem := v.Args[1] 3833 if !(is32Bit(off1 + off2)) { 3834 break 3835 } 3836 v.reset(OpMIPS64MOVBUload) 3837 v.AuxInt = off1 + off2 3838 v.Aux = sym 3839 v.AddArg(ptr) 3840 v.AddArg(mem) 3841 return true 3842 } 3843 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3844 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3845 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3846 for { 3847 off1 := v.AuxInt 3848 sym1 := v.Aux 3849 _ = v.Args[1] 3850 v_0 := v.Args[0] 3851 if v_0.Op != OpMIPS64MOVVaddr { 3852 break 3853 } 3854 off2 := v_0.AuxInt 3855 sym2 := v_0.Aux 3856 ptr := v_0.Args[0] 3857 mem := v.Args[1] 3858 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3859 break 3860 } 3861 v.reset(OpMIPS64MOVBUload) 3862 v.AuxInt = off1 + off2 3863 v.Aux = mergeSym(sym1, sym2) 3864 v.AddArg(ptr) 3865 v.AddArg(mem) 3866 return true 3867 } 3868 return false 3869 } 3870 func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool { 3871 // match: (MOVBUreg x:(MOVBUload _ _)) 3872 // cond: 3873 // result: (MOVVreg x) 3874 for { 3875 x := v.Args[0] 3876 if x.Op != OpMIPS64MOVBUload { 3877 break 3878 } 3879 _ = x.Args[1] 3880 v.reset(OpMIPS64MOVVreg) 3881 v.AddArg(x) 3882 return true 3883 } 3884 // match: (MOVBUreg x:(MOVBUreg _)) 3885 // cond: 3886 // result: (MOVVreg x) 3887 for { 3888 x := v.Args[0] 3889 if x.Op 
!= OpMIPS64MOVBUreg { 3890 break 3891 } 3892 v.reset(OpMIPS64MOVVreg) 3893 v.AddArg(x) 3894 return true 3895 } 3896 // match: (MOVBUreg (MOVVconst [c])) 3897 // cond: 3898 // result: (MOVVconst [int64(uint8(c))]) 3899 for { 3900 v_0 := v.Args[0] 3901 if v_0.Op != OpMIPS64MOVVconst { 3902 break 3903 } 3904 c := v_0.AuxInt 3905 v.reset(OpMIPS64MOVVconst) 3906 v.AuxInt = int64(uint8(c)) 3907 return true 3908 } 3909 return false 3910 } 3911 func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool { 3912 // match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem) 3913 // cond: is32Bit(off1+off2) 3914 // result: (MOVBload [off1+off2] {sym} ptr mem) 3915 for { 3916 off1 := v.AuxInt 3917 sym := v.Aux 3918 _ = v.Args[1] 3919 v_0 := v.Args[0] 3920 if v_0.Op != OpMIPS64ADDVconst { 3921 break 3922 } 3923 off2 := v_0.AuxInt 3924 ptr := v_0.Args[0] 3925 mem := v.Args[1] 3926 if !(is32Bit(off1 + off2)) { 3927 break 3928 } 3929 v.reset(OpMIPS64MOVBload) 3930 v.AuxInt = off1 + off2 3931 v.Aux = sym 3932 v.AddArg(ptr) 3933 v.AddArg(mem) 3934 return true 3935 } 3936 // match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3937 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3938 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3939 for { 3940 off1 := v.AuxInt 3941 sym1 := v.Aux 3942 _ = v.Args[1] 3943 v_0 := v.Args[0] 3944 if v_0.Op != OpMIPS64MOVVaddr { 3945 break 3946 } 3947 off2 := v_0.AuxInt 3948 sym2 := v_0.Aux 3949 ptr := v_0.Args[0] 3950 mem := v.Args[1] 3951 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3952 break 3953 } 3954 v.reset(OpMIPS64MOVBload) 3955 v.AuxInt = off1 + off2 3956 v.Aux = mergeSym(sym1, sym2) 3957 v.AddArg(ptr) 3958 v.AddArg(mem) 3959 return true 3960 } 3961 return false 3962 } 3963 func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool { 3964 // match: (MOVBreg x:(MOVBload _ _)) 3965 // cond: 3966 // result: (MOVVreg x) 3967 for { 3968 x := v.Args[0] 3969 if x.Op != OpMIPS64MOVBload { 3970 break 3971 } 3972 _ = 
x.Args[1] 3973 v.reset(OpMIPS64MOVVreg) 3974 v.AddArg(x) 3975 return true 3976 } 3977 // match: (MOVBreg x:(MOVBreg _)) 3978 // cond: 3979 // result: (MOVVreg x) 3980 for { 3981 x := v.Args[0] 3982 if x.Op != OpMIPS64MOVBreg { 3983 break 3984 } 3985 v.reset(OpMIPS64MOVVreg) 3986 v.AddArg(x) 3987 return true 3988 } 3989 // match: (MOVBreg (MOVVconst [c])) 3990 // cond: 3991 // result: (MOVVconst [int64(int8(c))]) 3992 for { 3993 v_0 := v.Args[0] 3994 if v_0.Op != OpMIPS64MOVVconst { 3995 break 3996 } 3997 c := v_0.AuxInt 3998 v.reset(OpMIPS64MOVVconst) 3999 v.AuxInt = int64(int8(c)) 4000 return true 4001 } 4002 return false 4003 } 4004 func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool { 4005 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4006 // cond: is32Bit(off1+off2) 4007 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 4008 for { 4009 off1 := v.AuxInt 4010 sym := v.Aux 4011 _ = v.Args[2] 4012 v_0 := v.Args[0] 4013 if v_0.Op != OpMIPS64ADDVconst { 4014 break 4015 } 4016 off2 := v_0.AuxInt 4017 ptr := v_0.Args[0] 4018 val := v.Args[1] 4019 mem := v.Args[2] 4020 if !(is32Bit(off1 + off2)) { 4021 break 4022 } 4023 v.reset(OpMIPS64MOVBstore) 4024 v.AuxInt = off1 + off2 4025 v.Aux = sym 4026 v.AddArg(ptr) 4027 v.AddArg(val) 4028 v.AddArg(mem) 4029 return true 4030 } 4031 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4032 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4033 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4034 for { 4035 off1 := v.AuxInt 4036 sym1 := v.Aux 4037 _ = v.Args[2] 4038 v_0 := v.Args[0] 4039 if v_0.Op != OpMIPS64MOVVaddr { 4040 break 4041 } 4042 off2 := v_0.AuxInt 4043 sym2 := v_0.Aux 4044 ptr := v_0.Args[0] 4045 val := v.Args[1] 4046 mem := v.Args[2] 4047 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4048 break 4049 } 4050 v.reset(OpMIPS64MOVBstore) 4051 v.AuxInt = off1 + off2 4052 v.Aux = mergeSym(sym1, sym2) 4053 v.AddArg(ptr) 4054 v.AddArg(val) 
4055 v.AddArg(mem) 4056 return true 4057 } 4058 // match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem) 4059 // cond: 4060 // result: (MOVBstorezero [off] {sym} ptr mem) 4061 for { 4062 off := v.AuxInt 4063 sym := v.Aux 4064 _ = v.Args[2] 4065 ptr := v.Args[0] 4066 v_1 := v.Args[1] 4067 if v_1.Op != OpMIPS64MOVVconst { 4068 break 4069 } 4070 if v_1.AuxInt != 0 { 4071 break 4072 } 4073 mem := v.Args[2] 4074 v.reset(OpMIPS64MOVBstorezero) 4075 v.AuxInt = off 4076 v.Aux = sym 4077 v.AddArg(ptr) 4078 v.AddArg(mem) 4079 return true 4080 } 4081 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 4082 // cond: 4083 // result: (MOVBstore [off] {sym} ptr x mem) 4084 for { 4085 off := v.AuxInt 4086 sym := v.Aux 4087 _ = v.Args[2] 4088 ptr := v.Args[0] 4089 v_1 := v.Args[1] 4090 if v_1.Op != OpMIPS64MOVBreg { 4091 break 4092 } 4093 x := v_1.Args[0] 4094 mem := v.Args[2] 4095 v.reset(OpMIPS64MOVBstore) 4096 v.AuxInt = off 4097 v.Aux = sym 4098 v.AddArg(ptr) 4099 v.AddArg(x) 4100 v.AddArg(mem) 4101 return true 4102 } 4103 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 4104 // cond: 4105 // result: (MOVBstore [off] {sym} ptr x mem) 4106 for { 4107 off := v.AuxInt 4108 sym := v.Aux 4109 _ = v.Args[2] 4110 ptr := v.Args[0] 4111 v_1 := v.Args[1] 4112 if v_1.Op != OpMIPS64MOVBUreg { 4113 break 4114 } 4115 x := v_1.Args[0] 4116 mem := v.Args[2] 4117 v.reset(OpMIPS64MOVBstore) 4118 v.AuxInt = off 4119 v.Aux = sym 4120 v.AddArg(ptr) 4121 v.AddArg(x) 4122 v.AddArg(mem) 4123 return true 4124 } 4125 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 4126 // cond: 4127 // result: (MOVBstore [off] {sym} ptr x mem) 4128 for { 4129 off := v.AuxInt 4130 sym := v.Aux 4131 _ = v.Args[2] 4132 ptr := v.Args[0] 4133 v_1 := v.Args[1] 4134 if v_1.Op != OpMIPS64MOVHreg { 4135 break 4136 } 4137 x := v_1.Args[0] 4138 mem := v.Args[2] 4139 v.reset(OpMIPS64MOVBstore) 4140 v.AuxInt = off 4141 v.Aux = sym 4142 v.AddArg(ptr) 4143 v.AddArg(x) 4144 v.AddArg(mem) 4145 return true 4146 } 4147 // 
match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 4148 // cond: 4149 // result: (MOVBstore [off] {sym} ptr x mem) 4150 for { 4151 off := v.AuxInt 4152 sym := v.Aux 4153 _ = v.Args[2] 4154 ptr := v.Args[0] 4155 v_1 := v.Args[1] 4156 if v_1.Op != OpMIPS64MOVHUreg { 4157 break 4158 } 4159 x := v_1.Args[0] 4160 mem := v.Args[2] 4161 v.reset(OpMIPS64MOVBstore) 4162 v.AuxInt = off 4163 v.Aux = sym 4164 v.AddArg(ptr) 4165 v.AddArg(x) 4166 v.AddArg(mem) 4167 return true 4168 } 4169 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 4170 // cond: 4171 // result: (MOVBstore [off] {sym} ptr x mem) 4172 for { 4173 off := v.AuxInt 4174 sym := v.Aux 4175 _ = v.Args[2] 4176 ptr := v.Args[0] 4177 v_1 := v.Args[1] 4178 if v_1.Op != OpMIPS64MOVWreg { 4179 break 4180 } 4181 x := v_1.Args[0] 4182 mem := v.Args[2] 4183 v.reset(OpMIPS64MOVBstore) 4184 v.AuxInt = off 4185 v.Aux = sym 4186 v.AddArg(ptr) 4187 v.AddArg(x) 4188 v.AddArg(mem) 4189 return true 4190 } 4191 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 4192 // cond: 4193 // result: (MOVBstore [off] {sym} ptr x mem) 4194 for { 4195 off := v.AuxInt 4196 sym := v.Aux 4197 _ = v.Args[2] 4198 ptr := v.Args[0] 4199 v_1 := v.Args[1] 4200 if v_1.Op != OpMIPS64MOVWUreg { 4201 break 4202 } 4203 x := v_1.Args[0] 4204 mem := v.Args[2] 4205 v.reset(OpMIPS64MOVBstore) 4206 v.AuxInt = off 4207 v.Aux = sym 4208 v.AddArg(ptr) 4209 v.AddArg(x) 4210 v.AddArg(mem) 4211 return true 4212 } 4213 return false 4214 } 4215 func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool { 4216 // match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4217 // cond: is32Bit(off1+off2) 4218 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 4219 for { 4220 off1 := v.AuxInt 4221 sym := v.Aux 4222 _ = v.Args[1] 4223 v_0 := v.Args[0] 4224 if v_0.Op != OpMIPS64ADDVconst { 4225 break 4226 } 4227 off2 := v_0.AuxInt 4228 ptr := v_0.Args[0] 4229 mem := v.Args[1] 4230 if !(is32Bit(off1 + off2)) { 4231 break 4232 } 4233 
	v.reset(OpMIPS64MOVBstorezero)
	v.AuxInt = off1 + off2
	v.Aux = sym
	v.AddArg(ptr)
	v.AddArg(mem)
	return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		// Only fold when the two symbols can legally merge and the combined
		// offset still fits in 32 bits.
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS64_OpMIPS64MOVDload_0 folds a constant address offset
// (ADDVconst) or a static symbol address (MOVVaddr) of the address operand
// into the MOVDload's AuxInt/Aux fields. Generated from gen/MIPS64.rules;
// change the rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool {
	// match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
v.reset(OpMIPS64MOVDload) 4311 v.AuxInt = off1 + off2 4312 v.Aux = mergeSym(sym1, sym2) 4313 v.AddArg(ptr) 4314 v.AddArg(mem) 4315 return true 4316 } 4317 return false 4318 } 4319 func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool { 4320 // match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4321 // cond: is32Bit(off1+off2) 4322 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4323 for { 4324 off1 := v.AuxInt 4325 sym := v.Aux 4326 _ = v.Args[2] 4327 v_0 := v.Args[0] 4328 if v_0.Op != OpMIPS64ADDVconst { 4329 break 4330 } 4331 off2 := v_0.AuxInt 4332 ptr := v_0.Args[0] 4333 val := v.Args[1] 4334 mem := v.Args[2] 4335 if !(is32Bit(off1 + off2)) { 4336 break 4337 } 4338 v.reset(OpMIPS64MOVDstore) 4339 v.AuxInt = off1 + off2 4340 v.Aux = sym 4341 v.AddArg(ptr) 4342 v.AddArg(val) 4343 v.AddArg(mem) 4344 return true 4345 } 4346 // match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4347 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4348 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4349 for { 4350 off1 := v.AuxInt 4351 sym1 := v.Aux 4352 _ = v.Args[2] 4353 v_0 := v.Args[0] 4354 if v_0.Op != OpMIPS64MOVVaddr { 4355 break 4356 } 4357 off2 := v_0.AuxInt 4358 sym2 := v_0.Aux 4359 ptr := v_0.Args[0] 4360 val := v.Args[1] 4361 mem := v.Args[2] 4362 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4363 break 4364 } 4365 v.reset(OpMIPS64MOVDstore) 4366 v.AuxInt = off1 + off2 4367 v.Aux = mergeSym(sym1, sym2) 4368 v.AddArg(ptr) 4369 v.AddArg(val) 4370 v.AddArg(mem) 4371 return true 4372 } 4373 return false 4374 } 4375 func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool { 4376 // match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem) 4377 // cond: is32Bit(off1+off2) 4378 // result: (MOVFload [off1+off2] {sym} ptr mem) 4379 for { 4380 off1 := v.AuxInt 4381 sym := v.Aux 4382 _ = v.Args[1] 4383 v_0 := v.Args[0] 4384 if v_0.Op != OpMIPS64ADDVconst { 4385 break 4386 } 4387 off2 := v_0.AuxInt 4388 ptr 
:= v_0.Args[0] 4389 mem := v.Args[1] 4390 if !(is32Bit(off1 + off2)) { 4391 break 4392 } 4393 v.reset(OpMIPS64MOVFload) 4394 v.AuxInt = off1 + off2 4395 v.Aux = sym 4396 v.AddArg(ptr) 4397 v.AddArg(mem) 4398 return true 4399 } 4400 // match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4401 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4402 // result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4403 for { 4404 off1 := v.AuxInt 4405 sym1 := v.Aux 4406 _ = v.Args[1] 4407 v_0 := v.Args[0] 4408 if v_0.Op != OpMIPS64MOVVaddr { 4409 break 4410 } 4411 off2 := v_0.AuxInt 4412 sym2 := v_0.Aux 4413 ptr := v_0.Args[0] 4414 mem := v.Args[1] 4415 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4416 break 4417 } 4418 v.reset(OpMIPS64MOVFload) 4419 v.AuxInt = off1 + off2 4420 v.Aux = mergeSym(sym1, sym2) 4421 v.AddArg(ptr) 4422 v.AddArg(mem) 4423 return true 4424 } 4425 return false 4426 } 4427 func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool { 4428 // match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4429 // cond: is32Bit(off1+off2) 4430 // result: (MOVFstore [off1+off2] {sym} ptr val mem) 4431 for { 4432 off1 := v.AuxInt 4433 sym := v.Aux 4434 _ = v.Args[2] 4435 v_0 := v.Args[0] 4436 if v_0.Op != OpMIPS64ADDVconst { 4437 break 4438 } 4439 off2 := v_0.AuxInt 4440 ptr := v_0.Args[0] 4441 val := v.Args[1] 4442 mem := v.Args[2] 4443 if !(is32Bit(off1 + off2)) { 4444 break 4445 } 4446 v.reset(OpMIPS64MOVFstore) 4447 v.AuxInt = off1 + off2 4448 v.Aux = sym 4449 v.AddArg(ptr) 4450 v.AddArg(val) 4451 v.AddArg(mem) 4452 return true 4453 } 4454 // match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4455 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4456 // result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4457 for { 4458 off1 := v.AuxInt 4459 sym1 := v.Aux 4460 _ = v.Args[2] 4461 v_0 := v.Args[0] 4462 if v_0.Op != OpMIPS64MOVVaddr { 4463 break 4464 } 4465 off2 := v_0.AuxInt 4466 sym2 := 
v_0.Aux 4467 ptr := v_0.Args[0] 4468 val := v.Args[1] 4469 mem := v.Args[2] 4470 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4471 break 4472 } 4473 v.reset(OpMIPS64MOVFstore) 4474 v.AuxInt = off1 + off2 4475 v.Aux = mergeSym(sym1, sym2) 4476 v.AddArg(ptr) 4477 v.AddArg(val) 4478 v.AddArg(mem) 4479 return true 4480 } 4481 return false 4482 } 4483 func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool { 4484 // match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem) 4485 // cond: is32Bit(off1+off2) 4486 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4487 for { 4488 off1 := v.AuxInt 4489 sym := v.Aux 4490 _ = v.Args[1] 4491 v_0 := v.Args[0] 4492 if v_0.Op != OpMIPS64ADDVconst { 4493 break 4494 } 4495 off2 := v_0.AuxInt 4496 ptr := v_0.Args[0] 4497 mem := v.Args[1] 4498 if !(is32Bit(off1 + off2)) { 4499 break 4500 } 4501 v.reset(OpMIPS64MOVHUload) 4502 v.AuxInt = off1 + off2 4503 v.Aux = sym 4504 v.AddArg(ptr) 4505 v.AddArg(mem) 4506 return true 4507 } 4508 // match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4509 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4510 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4511 for { 4512 off1 := v.AuxInt 4513 sym1 := v.Aux 4514 _ = v.Args[1] 4515 v_0 := v.Args[0] 4516 if v_0.Op != OpMIPS64MOVVaddr { 4517 break 4518 } 4519 off2 := v_0.AuxInt 4520 sym2 := v_0.Aux 4521 ptr := v_0.Args[0] 4522 mem := v.Args[1] 4523 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4524 break 4525 } 4526 v.reset(OpMIPS64MOVHUload) 4527 v.AuxInt = off1 + off2 4528 v.Aux = mergeSym(sym1, sym2) 4529 v.AddArg(ptr) 4530 v.AddArg(mem) 4531 return true 4532 } 4533 return false 4534 } 4535 func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool { 4536 // match: (MOVHUreg x:(MOVBUload _ _)) 4537 // cond: 4538 // result: (MOVVreg x) 4539 for { 4540 x := v.Args[0] 4541 if x.Op != OpMIPS64MOVBUload { 4542 break 4543 } 4544 _ = x.Args[1] 4545 v.reset(OpMIPS64MOVVreg) 4546 v.AddArg(x) 4547 return 
true 4548 } 4549 // match: (MOVHUreg x:(MOVHUload _ _)) 4550 // cond: 4551 // result: (MOVVreg x) 4552 for { 4553 x := v.Args[0] 4554 if x.Op != OpMIPS64MOVHUload { 4555 break 4556 } 4557 _ = x.Args[1] 4558 v.reset(OpMIPS64MOVVreg) 4559 v.AddArg(x) 4560 return true 4561 } 4562 // match: (MOVHUreg x:(MOVBUreg _)) 4563 // cond: 4564 // result: (MOVVreg x) 4565 for { 4566 x := v.Args[0] 4567 if x.Op != OpMIPS64MOVBUreg { 4568 break 4569 } 4570 v.reset(OpMIPS64MOVVreg) 4571 v.AddArg(x) 4572 return true 4573 } 4574 // match: (MOVHUreg x:(MOVHUreg _)) 4575 // cond: 4576 // result: (MOVVreg x) 4577 for { 4578 x := v.Args[0] 4579 if x.Op != OpMIPS64MOVHUreg { 4580 break 4581 } 4582 v.reset(OpMIPS64MOVVreg) 4583 v.AddArg(x) 4584 return true 4585 } 4586 // match: (MOVHUreg (MOVVconst [c])) 4587 // cond: 4588 // result: (MOVVconst [int64(uint16(c))]) 4589 for { 4590 v_0 := v.Args[0] 4591 if v_0.Op != OpMIPS64MOVVconst { 4592 break 4593 } 4594 c := v_0.AuxInt 4595 v.reset(OpMIPS64MOVVconst) 4596 v.AuxInt = int64(uint16(c)) 4597 return true 4598 } 4599 return false 4600 } 4601 func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool { 4602 // match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem) 4603 // cond: is32Bit(off1+off2) 4604 // result: (MOVHload [off1+off2] {sym} ptr mem) 4605 for { 4606 off1 := v.AuxInt 4607 sym := v.Aux 4608 _ = v.Args[1] 4609 v_0 := v.Args[0] 4610 if v_0.Op != OpMIPS64ADDVconst { 4611 break 4612 } 4613 off2 := v_0.AuxInt 4614 ptr := v_0.Args[0] 4615 mem := v.Args[1] 4616 if !(is32Bit(off1 + off2)) { 4617 break 4618 } 4619 v.reset(OpMIPS64MOVHload) 4620 v.AuxInt = off1 + off2 4621 v.Aux = sym 4622 v.AddArg(ptr) 4623 v.AddArg(mem) 4624 return true 4625 } 4626 // match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4627 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4628 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4629 for { 4630 off1 := v.AuxInt 4631 sym1 := v.Aux 4632 _ = v.Args[1] 4633 v_0 := v.Args[0] 4634 
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS64_OpMIPS64MOVHreg_0 elides redundant sign extensions:
// a MOVHreg of a value already narrowed by a load or an equal-or-narrower
// extension becomes a plain MOVVreg move, and a MOVHreg of a constant is
// folded into the sign-extended constant. Generated from gen/MIPS64.rules;
// change the rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool {
	// match: (MOVHreg x:(MOVBload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHload _ _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHload {
			break
		}
		_ = x.Args[1]
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVBUreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVBUreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg x:(MOVHreg _))
	// cond:
	// result: (MOVVreg x)
	for {
		x := v.Args[0]
		if x.Op != OpMIPS64MOVHreg {
			break
		}
		v.reset(OpMIPS64MOVVreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVHreg (MOVVconst [c]))
	// cond:
4731 // result: (MOVVconst [int64(int16(c))]) 4732 for { 4733 v_0 := v.Args[0] 4734 if v_0.Op != OpMIPS64MOVVconst { 4735 break 4736 } 4737 c := v_0.AuxInt 4738 v.reset(OpMIPS64MOVVconst) 4739 v.AuxInt = int64(int16(c)) 4740 return true 4741 } 4742 return false 4743 } 4744 func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool { 4745 // match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4746 // cond: is32Bit(off1+off2) 4747 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4748 for { 4749 off1 := v.AuxInt 4750 sym := v.Aux 4751 _ = v.Args[2] 4752 v_0 := v.Args[0] 4753 if v_0.Op != OpMIPS64ADDVconst { 4754 break 4755 } 4756 off2 := v_0.AuxInt 4757 ptr := v_0.Args[0] 4758 val := v.Args[1] 4759 mem := v.Args[2] 4760 if !(is32Bit(off1 + off2)) { 4761 break 4762 } 4763 v.reset(OpMIPS64MOVHstore) 4764 v.AuxInt = off1 + off2 4765 v.Aux = sym 4766 v.AddArg(ptr) 4767 v.AddArg(val) 4768 v.AddArg(mem) 4769 return true 4770 } 4771 // match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4772 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4773 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4774 for { 4775 off1 := v.AuxInt 4776 sym1 := v.Aux 4777 _ = v.Args[2] 4778 v_0 := v.Args[0] 4779 if v_0.Op != OpMIPS64MOVVaddr { 4780 break 4781 } 4782 off2 := v_0.AuxInt 4783 sym2 := v_0.Aux 4784 ptr := v_0.Args[0] 4785 val := v.Args[1] 4786 mem := v.Args[2] 4787 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4788 break 4789 } 4790 v.reset(OpMIPS64MOVHstore) 4791 v.AuxInt = off1 + off2 4792 v.Aux = mergeSym(sym1, sym2) 4793 v.AddArg(ptr) 4794 v.AddArg(val) 4795 v.AddArg(mem) 4796 return true 4797 } 4798 // match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem) 4799 // cond: 4800 // result: (MOVHstorezero [off] {sym} ptr mem) 4801 for { 4802 off := v.AuxInt 4803 sym := v.Aux 4804 _ = v.Args[2] 4805 ptr := v.Args[0] 4806 v_1 := v.Args[1] 4807 if v_1.Op != OpMIPS64MOVVconst { 4808 break 4809 } 4810 if v_1.AuxInt != 0 { 4811 
break 4812 } 4813 mem := v.Args[2] 4814 v.reset(OpMIPS64MOVHstorezero) 4815 v.AuxInt = off 4816 v.Aux = sym 4817 v.AddArg(ptr) 4818 v.AddArg(mem) 4819 return true 4820 } 4821 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4822 // cond: 4823 // result: (MOVHstore [off] {sym} ptr x mem) 4824 for { 4825 off := v.AuxInt 4826 sym := v.Aux 4827 _ = v.Args[2] 4828 ptr := v.Args[0] 4829 v_1 := v.Args[1] 4830 if v_1.Op != OpMIPS64MOVHreg { 4831 break 4832 } 4833 x := v_1.Args[0] 4834 mem := v.Args[2] 4835 v.reset(OpMIPS64MOVHstore) 4836 v.AuxInt = off 4837 v.Aux = sym 4838 v.AddArg(ptr) 4839 v.AddArg(x) 4840 v.AddArg(mem) 4841 return true 4842 } 4843 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4844 // cond: 4845 // result: (MOVHstore [off] {sym} ptr x mem) 4846 for { 4847 off := v.AuxInt 4848 sym := v.Aux 4849 _ = v.Args[2] 4850 ptr := v.Args[0] 4851 v_1 := v.Args[1] 4852 if v_1.Op != OpMIPS64MOVHUreg { 4853 break 4854 } 4855 x := v_1.Args[0] 4856 mem := v.Args[2] 4857 v.reset(OpMIPS64MOVHstore) 4858 v.AuxInt = off 4859 v.Aux = sym 4860 v.AddArg(ptr) 4861 v.AddArg(x) 4862 v.AddArg(mem) 4863 return true 4864 } 4865 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4866 // cond: 4867 // result: (MOVHstore [off] {sym} ptr x mem) 4868 for { 4869 off := v.AuxInt 4870 sym := v.Aux 4871 _ = v.Args[2] 4872 ptr := v.Args[0] 4873 v_1 := v.Args[1] 4874 if v_1.Op != OpMIPS64MOVWreg { 4875 break 4876 } 4877 x := v_1.Args[0] 4878 mem := v.Args[2] 4879 v.reset(OpMIPS64MOVHstore) 4880 v.AuxInt = off 4881 v.Aux = sym 4882 v.AddArg(ptr) 4883 v.AddArg(x) 4884 v.AddArg(mem) 4885 return true 4886 } 4887 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4888 // cond: 4889 // result: (MOVHstore [off] {sym} ptr x mem) 4890 for { 4891 off := v.AuxInt 4892 sym := v.Aux 4893 _ = v.Args[2] 4894 ptr := v.Args[0] 4895 v_1 := v.Args[1] 4896 if v_1.Op != OpMIPS64MOVWUreg { 4897 break 4898 } 4899 x := v_1.Args[0] 4900 mem := v.Args[2] 4901 v.reset(OpMIPS64MOVHstore) 4902 v.AuxInt 
= off 4903 v.Aux = sym 4904 v.AddArg(ptr) 4905 v.AddArg(x) 4906 v.AddArg(mem) 4907 return true 4908 } 4909 return false 4910 } 4911 func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool { 4912 // match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4913 // cond: is32Bit(off1+off2) 4914 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4915 for { 4916 off1 := v.AuxInt 4917 sym := v.Aux 4918 _ = v.Args[1] 4919 v_0 := v.Args[0] 4920 if v_0.Op != OpMIPS64ADDVconst { 4921 break 4922 } 4923 off2 := v_0.AuxInt 4924 ptr := v_0.Args[0] 4925 mem := v.Args[1] 4926 if !(is32Bit(off1 + off2)) { 4927 break 4928 } 4929 v.reset(OpMIPS64MOVHstorezero) 4930 v.AuxInt = off1 + off2 4931 v.Aux = sym 4932 v.AddArg(ptr) 4933 v.AddArg(mem) 4934 return true 4935 } 4936 // match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4937 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4938 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4939 for { 4940 off1 := v.AuxInt 4941 sym1 := v.Aux 4942 _ = v.Args[1] 4943 v_0 := v.Args[0] 4944 if v_0.Op != OpMIPS64MOVVaddr { 4945 break 4946 } 4947 off2 := v_0.AuxInt 4948 sym2 := v_0.Aux 4949 ptr := v_0.Args[0] 4950 mem := v.Args[1] 4951 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4952 break 4953 } 4954 v.reset(OpMIPS64MOVHstorezero) 4955 v.AuxInt = off1 + off2 4956 v.Aux = mergeSym(sym1, sym2) 4957 v.AddArg(ptr) 4958 v.AddArg(mem) 4959 return true 4960 } 4961 return false 4962 } 4963 func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool { 4964 // match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem) 4965 // cond: is32Bit(off1+off2) 4966 // result: (MOVVload [off1+off2] {sym} ptr mem) 4967 for { 4968 off1 := v.AuxInt 4969 sym := v.Aux 4970 _ = v.Args[1] 4971 v_0 := v.Args[0] 4972 if v_0.Op != OpMIPS64ADDVconst { 4973 break 4974 } 4975 off2 := v_0.AuxInt 4976 ptr := v_0.Args[0] 4977 mem := v.Args[1] 4978 if !(is32Bit(off1 + off2)) { 4979 break 4980 } 4981 
	v.reset(OpMIPS64MOVVload)
	v.AuxInt = off1 + off2
	v.Aux = sym
	v.AddArg(ptr)
	v.AddArg(mem)
	return true
	}
	// match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2)
	// result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) {
			break
		}
		v.reset(OpMIPS64MOVVload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS64_OpMIPS64MOVVreg_0 simplifies a 64-bit register move:
// a single-use source degrades to a MOVVnop, and a move of a constant is
// folded into the constant itself. Generated from gen/MIPS64.rules; change
// the rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool {
	// match: (MOVVreg x)
	// cond: x.Uses == 1
	// result: (MOVVnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPS64MOVVnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVVreg (MOVVconst [c]))
	// cond:
	// result: (MOVVconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValueMIPS64_OpMIPS64MOVVstore_0 folds constant offsets (ADDVconst)
// and static symbol addresses (MOVVaddr) into the MOVVstore's AuxInt/Aux.
// Generated from gen/MIPS64.rules; change the rules, not this file.
func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool {
	// match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2)
	// result: (MOVVstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64ADDVconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1 + off2)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AuxInt = off1 + off2
5064 v.Aux = sym 5065 v.AddArg(ptr) 5066 v.AddArg(val) 5067 v.AddArg(mem) 5068 return true 5069 } 5070 // match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 5071 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5072 // result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5073 for { 5074 off1 := v.AuxInt 5075 sym1 := v.Aux 5076 _ = v.Args[2] 5077 v_0 := v.Args[0] 5078 if v_0.Op != OpMIPS64MOVVaddr { 5079 break 5080 } 5081 off2 := v_0.AuxInt 5082 sym2 := v_0.Aux 5083 ptr := v_0.Args[0] 5084 val := v.Args[1] 5085 mem := v.Args[2] 5086 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5087 break 5088 } 5089 v.reset(OpMIPS64MOVVstore) 5090 v.AuxInt = off1 + off2 5091 v.Aux = mergeSym(sym1, sym2) 5092 v.AddArg(ptr) 5093 v.AddArg(val) 5094 v.AddArg(mem) 5095 return true 5096 } 5097 // match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem) 5098 // cond: 5099 // result: (MOVVstorezero [off] {sym} ptr mem) 5100 for { 5101 off := v.AuxInt 5102 sym := v.Aux 5103 _ = v.Args[2] 5104 ptr := v.Args[0] 5105 v_1 := v.Args[1] 5106 if v_1.Op != OpMIPS64MOVVconst { 5107 break 5108 } 5109 if v_1.AuxInt != 0 { 5110 break 5111 } 5112 mem := v.Args[2] 5113 v.reset(OpMIPS64MOVVstorezero) 5114 v.AuxInt = off 5115 v.Aux = sym 5116 v.AddArg(ptr) 5117 v.AddArg(mem) 5118 return true 5119 } 5120 return false 5121 } 5122 func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool { 5123 // match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 5124 // cond: is32Bit(off1+off2) 5125 // result: (MOVVstorezero [off1+off2] {sym} ptr mem) 5126 for { 5127 off1 := v.AuxInt 5128 sym := v.Aux 5129 _ = v.Args[1] 5130 v_0 := v.Args[0] 5131 if v_0.Op != OpMIPS64ADDVconst { 5132 break 5133 } 5134 off2 := v_0.AuxInt 5135 ptr := v_0.Args[0] 5136 mem := v.Args[1] 5137 if !(is32Bit(off1 + off2)) { 5138 break 5139 } 5140 v.reset(OpMIPS64MOVVstorezero) 5141 v.AuxInt = off1 + off2 5142 v.Aux = sym 5143 v.AddArg(ptr) 5144 v.AddArg(mem) 5145 return true 5146 } 5147 // 
match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5148 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5149 // result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5150 for { 5151 off1 := v.AuxInt 5152 sym1 := v.Aux 5153 _ = v.Args[1] 5154 v_0 := v.Args[0] 5155 if v_0.Op != OpMIPS64MOVVaddr { 5156 break 5157 } 5158 off2 := v_0.AuxInt 5159 sym2 := v_0.Aux 5160 ptr := v_0.Args[0] 5161 mem := v.Args[1] 5162 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5163 break 5164 } 5165 v.reset(OpMIPS64MOVVstorezero) 5166 v.AuxInt = off1 + off2 5167 v.Aux = mergeSym(sym1, sym2) 5168 v.AddArg(ptr) 5169 v.AddArg(mem) 5170 return true 5171 } 5172 return false 5173 } 5174 func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool { 5175 // match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem) 5176 // cond: is32Bit(off1+off2) 5177 // result: (MOVWUload [off1+off2] {sym} ptr mem) 5178 for { 5179 off1 := v.AuxInt 5180 sym := v.Aux 5181 _ = v.Args[1] 5182 v_0 := v.Args[0] 5183 if v_0.Op != OpMIPS64ADDVconst { 5184 break 5185 } 5186 off2 := v_0.AuxInt 5187 ptr := v_0.Args[0] 5188 mem := v.Args[1] 5189 if !(is32Bit(off1 + off2)) { 5190 break 5191 } 5192 v.reset(OpMIPS64MOVWUload) 5193 v.AuxInt = off1 + off2 5194 v.Aux = sym 5195 v.AddArg(ptr) 5196 v.AddArg(mem) 5197 return true 5198 } 5199 // match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5200 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5201 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5202 for { 5203 off1 := v.AuxInt 5204 sym1 := v.Aux 5205 _ = v.Args[1] 5206 v_0 := v.Args[0] 5207 if v_0.Op != OpMIPS64MOVVaddr { 5208 break 5209 } 5210 off2 := v_0.AuxInt 5211 sym2 := v_0.Aux 5212 ptr := v_0.Args[0] 5213 mem := v.Args[1] 5214 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5215 break 5216 } 5217 v.reset(OpMIPS64MOVWUload) 5218 v.AuxInt = off1 + off2 5219 v.Aux = mergeSym(sym1, sym2) 5220 v.AddArg(ptr) 5221 v.AddArg(mem) 5222 return 
true 5223 } 5224 return false 5225 } 5226 func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool { 5227 // match: (MOVWUreg x:(MOVBUload _ _)) 5228 // cond: 5229 // result: (MOVVreg x) 5230 for { 5231 x := v.Args[0] 5232 if x.Op != OpMIPS64MOVBUload { 5233 break 5234 } 5235 _ = x.Args[1] 5236 v.reset(OpMIPS64MOVVreg) 5237 v.AddArg(x) 5238 return true 5239 } 5240 // match: (MOVWUreg x:(MOVHUload _ _)) 5241 // cond: 5242 // result: (MOVVreg x) 5243 for { 5244 x := v.Args[0] 5245 if x.Op != OpMIPS64MOVHUload { 5246 break 5247 } 5248 _ = x.Args[1] 5249 v.reset(OpMIPS64MOVVreg) 5250 v.AddArg(x) 5251 return true 5252 } 5253 // match: (MOVWUreg x:(MOVWUload _ _)) 5254 // cond: 5255 // result: (MOVVreg x) 5256 for { 5257 x := v.Args[0] 5258 if x.Op != OpMIPS64MOVWUload { 5259 break 5260 } 5261 _ = x.Args[1] 5262 v.reset(OpMIPS64MOVVreg) 5263 v.AddArg(x) 5264 return true 5265 } 5266 // match: (MOVWUreg x:(MOVBUreg _)) 5267 // cond: 5268 // result: (MOVVreg x) 5269 for { 5270 x := v.Args[0] 5271 if x.Op != OpMIPS64MOVBUreg { 5272 break 5273 } 5274 v.reset(OpMIPS64MOVVreg) 5275 v.AddArg(x) 5276 return true 5277 } 5278 // match: (MOVWUreg x:(MOVHUreg _)) 5279 // cond: 5280 // result: (MOVVreg x) 5281 for { 5282 x := v.Args[0] 5283 if x.Op != OpMIPS64MOVHUreg { 5284 break 5285 } 5286 v.reset(OpMIPS64MOVVreg) 5287 v.AddArg(x) 5288 return true 5289 } 5290 // match: (MOVWUreg x:(MOVWUreg _)) 5291 // cond: 5292 // result: (MOVVreg x) 5293 for { 5294 x := v.Args[0] 5295 if x.Op != OpMIPS64MOVWUreg { 5296 break 5297 } 5298 v.reset(OpMIPS64MOVVreg) 5299 v.AddArg(x) 5300 return true 5301 } 5302 // match: (MOVWUreg (MOVVconst [c])) 5303 // cond: 5304 // result: (MOVVconst [int64(uint32(c))]) 5305 for { 5306 v_0 := v.Args[0] 5307 if v_0.Op != OpMIPS64MOVVconst { 5308 break 5309 } 5310 c := v_0.AuxInt 5311 v.reset(OpMIPS64MOVVconst) 5312 v.AuxInt = int64(uint32(c)) 5313 return true 5314 } 5315 return false 5316 } 5317 func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool { 5318 // 
match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem) 5319 // cond: is32Bit(off1+off2) 5320 // result: (MOVWload [off1+off2] {sym} ptr mem) 5321 for { 5322 off1 := v.AuxInt 5323 sym := v.Aux 5324 _ = v.Args[1] 5325 v_0 := v.Args[0] 5326 if v_0.Op != OpMIPS64ADDVconst { 5327 break 5328 } 5329 off2 := v_0.AuxInt 5330 ptr := v_0.Args[0] 5331 mem := v.Args[1] 5332 if !(is32Bit(off1 + off2)) { 5333 break 5334 } 5335 v.reset(OpMIPS64MOVWload) 5336 v.AuxInt = off1 + off2 5337 v.Aux = sym 5338 v.AddArg(ptr) 5339 v.AddArg(mem) 5340 return true 5341 } 5342 // match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5343 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5344 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5345 for { 5346 off1 := v.AuxInt 5347 sym1 := v.Aux 5348 _ = v.Args[1] 5349 v_0 := v.Args[0] 5350 if v_0.Op != OpMIPS64MOVVaddr { 5351 break 5352 } 5353 off2 := v_0.AuxInt 5354 sym2 := v_0.Aux 5355 ptr := v_0.Args[0] 5356 mem := v.Args[1] 5357 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5358 break 5359 } 5360 v.reset(OpMIPS64MOVWload) 5361 v.AuxInt = off1 + off2 5362 v.Aux = mergeSym(sym1, sym2) 5363 v.AddArg(ptr) 5364 v.AddArg(mem) 5365 return true 5366 } 5367 return false 5368 } 5369 func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool { 5370 // match: (MOVWreg x:(MOVBload _ _)) 5371 // cond: 5372 // result: (MOVVreg x) 5373 for { 5374 x := v.Args[0] 5375 if x.Op != OpMIPS64MOVBload { 5376 break 5377 } 5378 _ = x.Args[1] 5379 v.reset(OpMIPS64MOVVreg) 5380 v.AddArg(x) 5381 return true 5382 } 5383 // match: (MOVWreg x:(MOVBUload _ _)) 5384 // cond: 5385 // result: (MOVVreg x) 5386 for { 5387 x := v.Args[0] 5388 if x.Op != OpMIPS64MOVBUload { 5389 break 5390 } 5391 _ = x.Args[1] 5392 v.reset(OpMIPS64MOVVreg) 5393 v.AddArg(x) 5394 return true 5395 } 5396 // match: (MOVWreg x:(MOVHload _ _)) 5397 // cond: 5398 // result: (MOVVreg x) 5399 for { 5400 x := v.Args[0] 5401 if x.Op != OpMIPS64MOVHload { 5402 break 5403 } 
5404 _ = x.Args[1] 5405 v.reset(OpMIPS64MOVVreg) 5406 v.AddArg(x) 5407 return true 5408 } 5409 // match: (MOVWreg x:(MOVHUload _ _)) 5410 // cond: 5411 // result: (MOVVreg x) 5412 for { 5413 x := v.Args[0] 5414 if x.Op != OpMIPS64MOVHUload { 5415 break 5416 } 5417 _ = x.Args[1] 5418 v.reset(OpMIPS64MOVVreg) 5419 v.AddArg(x) 5420 return true 5421 } 5422 // match: (MOVWreg x:(MOVWload _ _)) 5423 // cond: 5424 // result: (MOVVreg x) 5425 for { 5426 x := v.Args[0] 5427 if x.Op != OpMIPS64MOVWload { 5428 break 5429 } 5430 _ = x.Args[1] 5431 v.reset(OpMIPS64MOVVreg) 5432 v.AddArg(x) 5433 return true 5434 } 5435 // match: (MOVWreg x:(MOVBreg _)) 5436 // cond: 5437 // result: (MOVVreg x) 5438 for { 5439 x := v.Args[0] 5440 if x.Op != OpMIPS64MOVBreg { 5441 break 5442 } 5443 v.reset(OpMIPS64MOVVreg) 5444 v.AddArg(x) 5445 return true 5446 } 5447 // match: (MOVWreg x:(MOVBUreg _)) 5448 // cond: 5449 // result: (MOVVreg x) 5450 for { 5451 x := v.Args[0] 5452 if x.Op != OpMIPS64MOVBUreg { 5453 break 5454 } 5455 v.reset(OpMIPS64MOVVreg) 5456 v.AddArg(x) 5457 return true 5458 } 5459 // match: (MOVWreg x:(MOVHreg _)) 5460 // cond: 5461 // result: (MOVVreg x) 5462 for { 5463 x := v.Args[0] 5464 if x.Op != OpMIPS64MOVHreg { 5465 break 5466 } 5467 v.reset(OpMIPS64MOVVreg) 5468 v.AddArg(x) 5469 return true 5470 } 5471 // match: (MOVWreg x:(MOVHreg _)) 5472 // cond: 5473 // result: (MOVVreg x) 5474 for { 5475 x := v.Args[0] 5476 if x.Op != OpMIPS64MOVHreg { 5477 break 5478 } 5479 v.reset(OpMIPS64MOVVreg) 5480 v.AddArg(x) 5481 return true 5482 } 5483 // match: (MOVWreg x:(MOVWreg _)) 5484 // cond: 5485 // result: (MOVVreg x) 5486 for { 5487 x := v.Args[0] 5488 if x.Op != OpMIPS64MOVWreg { 5489 break 5490 } 5491 v.reset(OpMIPS64MOVVreg) 5492 v.AddArg(x) 5493 return true 5494 } 5495 return false 5496 } 5497 func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool { 5498 // match: (MOVWreg (MOVVconst [c])) 5499 // cond: 5500 // result: (MOVVconst [int64(int32(c))]) 5501 for { 5502 v_0 := 
v.Args[0] 5503 if v_0.Op != OpMIPS64MOVVconst { 5504 break 5505 } 5506 c := v_0.AuxInt 5507 v.reset(OpMIPS64MOVVconst) 5508 v.AuxInt = int64(int32(c)) 5509 return true 5510 } 5511 return false 5512 } 5513 func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool { 5514 // match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 5515 // cond: is32Bit(off1+off2) 5516 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 5517 for { 5518 off1 := v.AuxInt 5519 sym := v.Aux 5520 _ = v.Args[2] 5521 v_0 := v.Args[0] 5522 if v_0.Op != OpMIPS64ADDVconst { 5523 break 5524 } 5525 off2 := v_0.AuxInt 5526 ptr := v_0.Args[0] 5527 val := v.Args[1] 5528 mem := v.Args[2] 5529 if !(is32Bit(off1 + off2)) { 5530 break 5531 } 5532 v.reset(OpMIPS64MOVWstore) 5533 v.AuxInt = off1 + off2 5534 v.Aux = sym 5535 v.AddArg(ptr) 5536 v.AddArg(val) 5537 v.AddArg(mem) 5538 return true 5539 } 5540 // match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 5541 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5542 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5543 for { 5544 off1 := v.AuxInt 5545 sym1 := v.Aux 5546 _ = v.Args[2] 5547 v_0 := v.Args[0] 5548 if v_0.Op != OpMIPS64MOVVaddr { 5549 break 5550 } 5551 off2 := v_0.AuxInt 5552 sym2 := v_0.Aux 5553 ptr := v_0.Args[0] 5554 val := v.Args[1] 5555 mem := v.Args[2] 5556 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5557 break 5558 } 5559 v.reset(OpMIPS64MOVWstore) 5560 v.AuxInt = off1 + off2 5561 v.Aux = mergeSym(sym1, sym2) 5562 v.AddArg(ptr) 5563 v.AddArg(val) 5564 v.AddArg(mem) 5565 return true 5566 } 5567 // match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem) 5568 // cond: 5569 // result: (MOVWstorezero [off] {sym} ptr mem) 5570 for { 5571 off := v.AuxInt 5572 sym := v.Aux 5573 _ = v.Args[2] 5574 ptr := v.Args[0] 5575 v_1 := v.Args[1] 5576 if v_1.Op != OpMIPS64MOVVconst { 5577 break 5578 } 5579 if v_1.AuxInt != 0 { 5580 break 5581 } 5582 mem := v.Args[2] 5583 
v.reset(OpMIPS64MOVWstorezero) 5584 v.AuxInt = off 5585 v.Aux = sym 5586 v.AddArg(ptr) 5587 v.AddArg(mem) 5588 return true 5589 } 5590 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5591 // cond: 5592 // result: (MOVWstore [off] {sym} ptr x mem) 5593 for { 5594 off := v.AuxInt 5595 sym := v.Aux 5596 _ = v.Args[2] 5597 ptr := v.Args[0] 5598 v_1 := v.Args[1] 5599 if v_1.Op != OpMIPS64MOVWreg { 5600 break 5601 } 5602 x := v_1.Args[0] 5603 mem := v.Args[2] 5604 v.reset(OpMIPS64MOVWstore) 5605 v.AuxInt = off 5606 v.Aux = sym 5607 v.AddArg(ptr) 5608 v.AddArg(x) 5609 v.AddArg(mem) 5610 return true 5611 } 5612 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 5613 // cond: 5614 // result: (MOVWstore [off] {sym} ptr x mem) 5615 for { 5616 off := v.AuxInt 5617 sym := v.Aux 5618 _ = v.Args[2] 5619 ptr := v.Args[0] 5620 v_1 := v.Args[1] 5621 if v_1.Op != OpMIPS64MOVWUreg { 5622 break 5623 } 5624 x := v_1.Args[0] 5625 mem := v.Args[2] 5626 v.reset(OpMIPS64MOVWstore) 5627 v.AuxInt = off 5628 v.Aux = sym 5629 v.AddArg(ptr) 5630 v.AddArg(x) 5631 v.AddArg(mem) 5632 return true 5633 } 5634 return false 5635 } 5636 func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool { 5637 // match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 5638 // cond: is32Bit(off1+off2) 5639 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5640 for { 5641 off1 := v.AuxInt 5642 sym := v.Aux 5643 _ = v.Args[1] 5644 v_0 := v.Args[0] 5645 if v_0.Op != OpMIPS64ADDVconst { 5646 break 5647 } 5648 off2 := v_0.AuxInt 5649 ptr := v_0.Args[0] 5650 mem := v.Args[1] 5651 if !(is32Bit(off1 + off2)) { 5652 break 5653 } 5654 v.reset(OpMIPS64MOVWstorezero) 5655 v.AuxInt = off1 + off2 5656 v.Aux = sym 5657 v.AddArg(ptr) 5658 v.AddArg(mem) 5659 return true 5660 } 5661 // match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5662 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5663 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5664 for { 5665 off1 
:= v.AuxInt 5666 sym1 := v.Aux 5667 _ = v.Args[1] 5668 v_0 := v.Args[0] 5669 if v_0.Op != OpMIPS64MOVVaddr { 5670 break 5671 } 5672 off2 := v_0.AuxInt 5673 sym2 := v_0.Aux 5674 ptr := v_0.Args[0] 5675 mem := v.Args[1] 5676 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5677 break 5678 } 5679 v.reset(OpMIPS64MOVWstorezero) 5680 v.AuxInt = off1 + off2 5681 v.Aux = mergeSym(sym1, sym2) 5682 v.AddArg(ptr) 5683 v.AddArg(mem) 5684 return true 5685 } 5686 return false 5687 } 5688 func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool { 5689 // match: (NEGV (MOVVconst [c])) 5690 // cond: 5691 // result: (MOVVconst [-c]) 5692 for { 5693 v_0 := v.Args[0] 5694 if v_0.Op != OpMIPS64MOVVconst { 5695 break 5696 } 5697 c := v_0.AuxInt 5698 v.reset(OpMIPS64MOVVconst) 5699 v.AuxInt = -c 5700 return true 5701 } 5702 return false 5703 } 5704 func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool { 5705 // match: (NOR x (MOVVconst [c])) 5706 // cond: is32Bit(c) 5707 // result: (NORconst [c] x) 5708 for { 5709 _ = v.Args[1] 5710 x := v.Args[0] 5711 v_1 := v.Args[1] 5712 if v_1.Op != OpMIPS64MOVVconst { 5713 break 5714 } 5715 c := v_1.AuxInt 5716 if !(is32Bit(c)) { 5717 break 5718 } 5719 v.reset(OpMIPS64NORconst) 5720 v.AuxInt = c 5721 v.AddArg(x) 5722 return true 5723 } 5724 // match: (NOR (MOVVconst [c]) x) 5725 // cond: is32Bit(c) 5726 // result: (NORconst [c] x) 5727 for { 5728 _ = v.Args[1] 5729 v_0 := v.Args[0] 5730 if v_0.Op != OpMIPS64MOVVconst { 5731 break 5732 } 5733 c := v_0.AuxInt 5734 x := v.Args[1] 5735 if !(is32Bit(c)) { 5736 break 5737 } 5738 v.reset(OpMIPS64NORconst) 5739 v.AuxInt = c 5740 v.AddArg(x) 5741 return true 5742 } 5743 return false 5744 } 5745 func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool { 5746 // match: (NORconst [c] (MOVVconst [d])) 5747 // cond: 5748 // result: (MOVVconst [^(c|d)]) 5749 for { 5750 c := v.AuxInt 5751 v_0 := v.Args[0] 5752 if v_0.Op != OpMIPS64MOVVconst { 5753 break 5754 } 5755 d := v_0.AuxInt 5756 v.reset(OpMIPS64MOVVconst) 
5757 v.AuxInt = ^(c | d) 5758 return true 5759 } 5760 return false 5761 } 5762 func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool { 5763 // match: (OR x (MOVVconst [c])) 5764 // cond: is32Bit(c) 5765 // result: (ORconst [c] x) 5766 for { 5767 _ = v.Args[1] 5768 x := v.Args[0] 5769 v_1 := v.Args[1] 5770 if v_1.Op != OpMIPS64MOVVconst { 5771 break 5772 } 5773 c := v_1.AuxInt 5774 if !(is32Bit(c)) { 5775 break 5776 } 5777 v.reset(OpMIPS64ORconst) 5778 v.AuxInt = c 5779 v.AddArg(x) 5780 return true 5781 } 5782 // match: (OR (MOVVconst [c]) x) 5783 // cond: is32Bit(c) 5784 // result: (ORconst [c] x) 5785 for { 5786 _ = v.Args[1] 5787 v_0 := v.Args[0] 5788 if v_0.Op != OpMIPS64MOVVconst { 5789 break 5790 } 5791 c := v_0.AuxInt 5792 x := v.Args[1] 5793 if !(is32Bit(c)) { 5794 break 5795 } 5796 v.reset(OpMIPS64ORconst) 5797 v.AuxInt = c 5798 v.AddArg(x) 5799 return true 5800 } 5801 // match: (OR x x) 5802 // cond: 5803 // result: x 5804 for { 5805 _ = v.Args[1] 5806 x := v.Args[0] 5807 if x != v.Args[1] { 5808 break 5809 } 5810 v.reset(OpCopy) 5811 v.Type = x.Type 5812 v.AddArg(x) 5813 return true 5814 } 5815 return false 5816 } 5817 func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool { 5818 // match: (ORconst [0] x) 5819 // cond: 5820 // result: x 5821 for { 5822 if v.AuxInt != 0 { 5823 break 5824 } 5825 x := v.Args[0] 5826 v.reset(OpCopy) 5827 v.Type = x.Type 5828 v.AddArg(x) 5829 return true 5830 } 5831 // match: (ORconst [-1] _) 5832 // cond: 5833 // result: (MOVVconst [-1]) 5834 for { 5835 if v.AuxInt != -1 { 5836 break 5837 } 5838 v.reset(OpMIPS64MOVVconst) 5839 v.AuxInt = -1 5840 return true 5841 } 5842 // match: (ORconst [c] (MOVVconst [d])) 5843 // cond: 5844 // result: (MOVVconst [c|d]) 5845 for { 5846 c := v.AuxInt 5847 v_0 := v.Args[0] 5848 if v_0.Op != OpMIPS64MOVVconst { 5849 break 5850 } 5851 d := v_0.AuxInt 5852 v.reset(OpMIPS64MOVVconst) 5853 v.AuxInt = c | d 5854 return true 5855 } 5856 // match: (ORconst [c] (ORconst [d] x)) 5857 // cond: 
is32Bit(c|d) 5858 // result: (ORconst [c|d] x) 5859 for { 5860 c := v.AuxInt 5861 v_0 := v.Args[0] 5862 if v_0.Op != OpMIPS64ORconst { 5863 break 5864 } 5865 d := v_0.AuxInt 5866 x := v_0.Args[0] 5867 if !(is32Bit(c | d)) { 5868 break 5869 } 5870 v.reset(OpMIPS64ORconst) 5871 v.AuxInt = c | d 5872 v.AddArg(x) 5873 return true 5874 } 5875 return false 5876 } 5877 func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool { 5878 // match: (SGT (MOVVconst [c]) x) 5879 // cond: is32Bit(c) 5880 // result: (SGTconst [c] x) 5881 for { 5882 _ = v.Args[1] 5883 v_0 := v.Args[0] 5884 if v_0.Op != OpMIPS64MOVVconst { 5885 break 5886 } 5887 c := v_0.AuxInt 5888 x := v.Args[1] 5889 if !(is32Bit(c)) { 5890 break 5891 } 5892 v.reset(OpMIPS64SGTconst) 5893 v.AuxInt = c 5894 v.AddArg(x) 5895 return true 5896 } 5897 return false 5898 } 5899 func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool { 5900 // match: (SGTU (MOVVconst [c]) x) 5901 // cond: is32Bit(c) 5902 // result: (SGTUconst [c] x) 5903 for { 5904 _ = v.Args[1] 5905 v_0 := v.Args[0] 5906 if v_0.Op != OpMIPS64MOVVconst { 5907 break 5908 } 5909 c := v_0.AuxInt 5910 x := v.Args[1] 5911 if !(is32Bit(c)) { 5912 break 5913 } 5914 v.reset(OpMIPS64SGTUconst) 5915 v.AuxInt = c 5916 v.AddArg(x) 5917 return true 5918 } 5919 return false 5920 } 5921 func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool { 5922 // match: (SGTUconst [c] (MOVVconst [d])) 5923 // cond: uint64(c)>uint64(d) 5924 // result: (MOVVconst [1]) 5925 for { 5926 c := v.AuxInt 5927 v_0 := v.Args[0] 5928 if v_0.Op != OpMIPS64MOVVconst { 5929 break 5930 } 5931 d := v_0.AuxInt 5932 if !(uint64(c) > uint64(d)) { 5933 break 5934 } 5935 v.reset(OpMIPS64MOVVconst) 5936 v.AuxInt = 1 5937 return true 5938 } 5939 // match: (SGTUconst [c] (MOVVconst [d])) 5940 // cond: uint64(c)<=uint64(d) 5941 // result: (MOVVconst [0]) 5942 for { 5943 c := v.AuxInt 5944 v_0 := v.Args[0] 5945 if v_0.Op != OpMIPS64MOVVconst { 5946 break 5947 } 5948 d := v_0.AuxInt 5949 if !(uint64(c) <= 
uint64(d)) { 5950 break 5951 } 5952 v.reset(OpMIPS64MOVVconst) 5953 v.AuxInt = 0 5954 return true 5955 } 5956 // match: (SGTUconst [c] (MOVBUreg _)) 5957 // cond: 0xff < uint64(c) 5958 // result: (MOVVconst [1]) 5959 for { 5960 c := v.AuxInt 5961 v_0 := v.Args[0] 5962 if v_0.Op != OpMIPS64MOVBUreg { 5963 break 5964 } 5965 if !(0xff < uint64(c)) { 5966 break 5967 } 5968 v.reset(OpMIPS64MOVVconst) 5969 v.AuxInt = 1 5970 return true 5971 } 5972 // match: (SGTUconst [c] (MOVHUreg _)) 5973 // cond: 0xffff < uint64(c) 5974 // result: (MOVVconst [1]) 5975 for { 5976 c := v.AuxInt 5977 v_0 := v.Args[0] 5978 if v_0.Op != OpMIPS64MOVHUreg { 5979 break 5980 } 5981 if !(0xffff < uint64(c)) { 5982 break 5983 } 5984 v.reset(OpMIPS64MOVVconst) 5985 v.AuxInt = 1 5986 return true 5987 } 5988 // match: (SGTUconst [c] (ANDconst [m] _)) 5989 // cond: uint64(m) < uint64(c) 5990 // result: (MOVVconst [1]) 5991 for { 5992 c := v.AuxInt 5993 v_0 := v.Args[0] 5994 if v_0.Op != OpMIPS64ANDconst { 5995 break 5996 } 5997 m := v_0.AuxInt 5998 if !(uint64(m) < uint64(c)) { 5999 break 6000 } 6001 v.reset(OpMIPS64MOVVconst) 6002 v.AuxInt = 1 6003 return true 6004 } 6005 // match: (SGTUconst [c] (SRLVconst _ [d])) 6006 // cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c) 6007 // result: (MOVVconst [1]) 6008 for { 6009 c := v.AuxInt 6010 v_0 := v.Args[0] 6011 if v_0.Op != OpMIPS64SRLVconst { 6012 break 6013 } 6014 d := v_0.AuxInt 6015 if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) { 6016 break 6017 } 6018 v.reset(OpMIPS64MOVVconst) 6019 v.AuxInt = 1 6020 return true 6021 } 6022 return false 6023 } 6024 func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool { 6025 // match: (SGTconst [c] (MOVVconst [d])) 6026 // cond: c>d 6027 // result: (MOVVconst [1]) 6028 for { 6029 c := v.AuxInt 6030 v_0 := v.Args[0] 6031 if v_0.Op != OpMIPS64MOVVconst { 6032 break 6033 } 6034 d := v_0.AuxInt 6035 if !(c > d) { 6036 break 6037 } 6038 v.reset(OpMIPS64MOVVconst) 6039 v.AuxInt = 1 6040 return true 
6041 } 6042 // match: (SGTconst [c] (MOVVconst [d])) 6043 // cond: c<=d 6044 // result: (MOVVconst [0]) 6045 for { 6046 c := v.AuxInt 6047 v_0 := v.Args[0] 6048 if v_0.Op != OpMIPS64MOVVconst { 6049 break 6050 } 6051 d := v_0.AuxInt 6052 if !(c <= d) { 6053 break 6054 } 6055 v.reset(OpMIPS64MOVVconst) 6056 v.AuxInt = 0 6057 return true 6058 } 6059 // match: (SGTconst [c] (MOVBreg _)) 6060 // cond: 0x7f < c 6061 // result: (MOVVconst [1]) 6062 for { 6063 c := v.AuxInt 6064 v_0 := v.Args[0] 6065 if v_0.Op != OpMIPS64MOVBreg { 6066 break 6067 } 6068 if !(0x7f < c) { 6069 break 6070 } 6071 v.reset(OpMIPS64MOVVconst) 6072 v.AuxInt = 1 6073 return true 6074 } 6075 // match: (SGTconst [c] (MOVBreg _)) 6076 // cond: c <= -0x80 6077 // result: (MOVVconst [0]) 6078 for { 6079 c := v.AuxInt 6080 v_0 := v.Args[0] 6081 if v_0.Op != OpMIPS64MOVBreg { 6082 break 6083 } 6084 if !(c <= -0x80) { 6085 break 6086 } 6087 v.reset(OpMIPS64MOVVconst) 6088 v.AuxInt = 0 6089 return true 6090 } 6091 // match: (SGTconst [c] (MOVBUreg _)) 6092 // cond: 0xff < c 6093 // result: (MOVVconst [1]) 6094 for { 6095 c := v.AuxInt 6096 v_0 := v.Args[0] 6097 if v_0.Op != OpMIPS64MOVBUreg { 6098 break 6099 } 6100 if !(0xff < c) { 6101 break 6102 } 6103 v.reset(OpMIPS64MOVVconst) 6104 v.AuxInt = 1 6105 return true 6106 } 6107 // match: (SGTconst [c] (MOVBUreg _)) 6108 // cond: c < 0 6109 // result: (MOVVconst [0]) 6110 for { 6111 c := v.AuxInt 6112 v_0 := v.Args[0] 6113 if v_0.Op != OpMIPS64MOVBUreg { 6114 break 6115 } 6116 if !(c < 0) { 6117 break 6118 } 6119 v.reset(OpMIPS64MOVVconst) 6120 v.AuxInt = 0 6121 return true 6122 } 6123 // match: (SGTconst [c] (MOVHreg _)) 6124 // cond: 0x7fff < c 6125 // result: (MOVVconst [1]) 6126 for { 6127 c := v.AuxInt 6128 v_0 := v.Args[0] 6129 if v_0.Op != OpMIPS64MOVHreg { 6130 break 6131 } 6132 if !(0x7fff < c) { 6133 break 6134 } 6135 v.reset(OpMIPS64MOVVconst) 6136 v.AuxInt = 1 6137 return true 6138 } 6139 // match: (SGTconst [c] (MOVHreg _)) 6140 // cond: c <= 
-0x8000 6141 // result: (MOVVconst [0]) 6142 for { 6143 c := v.AuxInt 6144 v_0 := v.Args[0] 6145 if v_0.Op != OpMIPS64MOVHreg { 6146 break 6147 } 6148 if !(c <= -0x8000) { 6149 break 6150 } 6151 v.reset(OpMIPS64MOVVconst) 6152 v.AuxInt = 0 6153 return true 6154 } 6155 // match: (SGTconst [c] (MOVHUreg _)) 6156 // cond: 0xffff < c 6157 // result: (MOVVconst [1]) 6158 for { 6159 c := v.AuxInt 6160 v_0 := v.Args[0] 6161 if v_0.Op != OpMIPS64MOVHUreg { 6162 break 6163 } 6164 if !(0xffff < c) { 6165 break 6166 } 6167 v.reset(OpMIPS64MOVVconst) 6168 v.AuxInt = 1 6169 return true 6170 } 6171 // match: (SGTconst [c] (MOVHUreg _)) 6172 // cond: c < 0 6173 // result: (MOVVconst [0]) 6174 for { 6175 c := v.AuxInt 6176 v_0 := v.Args[0] 6177 if v_0.Op != OpMIPS64MOVHUreg { 6178 break 6179 } 6180 if !(c < 0) { 6181 break 6182 } 6183 v.reset(OpMIPS64MOVVconst) 6184 v.AuxInt = 0 6185 return true 6186 } 6187 return false 6188 } 6189 func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool { 6190 // match: (SGTconst [c] (MOVWUreg _)) 6191 // cond: c < 0 6192 // result: (MOVVconst [0]) 6193 for { 6194 c := v.AuxInt 6195 v_0 := v.Args[0] 6196 if v_0.Op != OpMIPS64MOVWUreg { 6197 break 6198 } 6199 if !(c < 0) { 6200 break 6201 } 6202 v.reset(OpMIPS64MOVVconst) 6203 v.AuxInt = 0 6204 return true 6205 } 6206 // match: (SGTconst [c] (ANDconst [m] _)) 6207 // cond: 0 <= m && m < c 6208 // result: (MOVVconst [1]) 6209 for { 6210 c := v.AuxInt 6211 v_0 := v.Args[0] 6212 if v_0.Op != OpMIPS64ANDconst { 6213 break 6214 } 6215 m := v_0.AuxInt 6216 if !(0 <= m && m < c) { 6217 break 6218 } 6219 v.reset(OpMIPS64MOVVconst) 6220 v.AuxInt = 1 6221 return true 6222 } 6223 // match: (SGTconst [c] (SRLVconst _ [d])) 6224 // cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c 6225 // result: (MOVVconst [1]) 6226 for { 6227 c := v.AuxInt 6228 v_0 := v.Args[0] 6229 if v_0.Op != OpMIPS64SRLVconst { 6230 break 6231 } 6232 d := v_0.AuxInt 6233 if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) { 
6234 break 6235 } 6236 v.reset(OpMIPS64MOVVconst) 6237 v.AuxInt = 1 6238 return true 6239 } 6240 return false 6241 } 6242 func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool { 6243 // match: (SLLV _ (MOVVconst [c])) 6244 // cond: uint64(c)>=64 6245 // result: (MOVVconst [0]) 6246 for { 6247 _ = v.Args[1] 6248 v_1 := v.Args[1] 6249 if v_1.Op != OpMIPS64MOVVconst { 6250 break 6251 } 6252 c := v_1.AuxInt 6253 if !(uint64(c) >= 64) { 6254 break 6255 } 6256 v.reset(OpMIPS64MOVVconst) 6257 v.AuxInt = 0 6258 return true 6259 } 6260 // match: (SLLV x (MOVVconst [c])) 6261 // cond: 6262 // result: (SLLVconst x [c]) 6263 for { 6264 _ = v.Args[1] 6265 x := v.Args[0] 6266 v_1 := v.Args[1] 6267 if v_1.Op != OpMIPS64MOVVconst { 6268 break 6269 } 6270 c := v_1.AuxInt 6271 v.reset(OpMIPS64SLLVconst) 6272 v.AuxInt = c 6273 v.AddArg(x) 6274 return true 6275 } 6276 return false 6277 } 6278 func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool { 6279 // match: (SLLVconst [c] (MOVVconst [d])) 6280 // cond: 6281 // result: (MOVVconst [d<<uint64(c)]) 6282 for { 6283 c := v.AuxInt 6284 v_0 := v.Args[0] 6285 if v_0.Op != OpMIPS64MOVVconst { 6286 break 6287 } 6288 d := v_0.AuxInt 6289 v.reset(OpMIPS64MOVVconst) 6290 v.AuxInt = d << uint64(c) 6291 return true 6292 } 6293 return false 6294 } 6295 func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool { 6296 // match: (SRAV x (MOVVconst [c])) 6297 // cond: uint64(c)>=64 6298 // result: (SRAVconst x [63]) 6299 for { 6300 _ = v.Args[1] 6301 x := v.Args[0] 6302 v_1 := v.Args[1] 6303 if v_1.Op != OpMIPS64MOVVconst { 6304 break 6305 } 6306 c := v_1.AuxInt 6307 if !(uint64(c) >= 64) { 6308 break 6309 } 6310 v.reset(OpMIPS64SRAVconst) 6311 v.AuxInt = 63 6312 v.AddArg(x) 6313 return true 6314 } 6315 // match: (SRAV x (MOVVconst [c])) 6316 // cond: 6317 // result: (SRAVconst x [c]) 6318 for { 6319 _ = v.Args[1] 6320 x := v.Args[0] 6321 v_1 := v.Args[1] 6322 if v_1.Op != OpMIPS64MOVVconst { 6323 break 6324 } 6325 c := v_1.AuxInt 6326 
v.reset(OpMIPS64SRAVconst) 6327 v.AuxInt = c 6328 v.AddArg(x) 6329 return true 6330 } 6331 return false 6332 } 6333 func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool { 6334 // match: (SRAVconst [c] (MOVVconst [d])) 6335 // cond: 6336 // result: (MOVVconst [d>>uint64(c)]) 6337 for { 6338 c := v.AuxInt 6339 v_0 := v.Args[0] 6340 if v_0.Op != OpMIPS64MOVVconst { 6341 break 6342 } 6343 d := v_0.AuxInt 6344 v.reset(OpMIPS64MOVVconst) 6345 v.AuxInt = d >> uint64(c) 6346 return true 6347 } 6348 return false 6349 } 6350 func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool { 6351 // match: (SRLV _ (MOVVconst [c])) 6352 // cond: uint64(c)>=64 6353 // result: (MOVVconst [0]) 6354 for { 6355 _ = v.Args[1] 6356 v_1 := v.Args[1] 6357 if v_1.Op != OpMIPS64MOVVconst { 6358 break 6359 } 6360 c := v_1.AuxInt 6361 if !(uint64(c) >= 64) { 6362 break 6363 } 6364 v.reset(OpMIPS64MOVVconst) 6365 v.AuxInt = 0 6366 return true 6367 } 6368 // match: (SRLV x (MOVVconst [c])) 6369 // cond: 6370 // result: (SRLVconst x [c]) 6371 for { 6372 _ = v.Args[1] 6373 x := v.Args[0] 6374 v_1 := v.Args[1] 6375 if v_1.Op != OpMIPS64MOVVconst { 6376 break 6377 } 6378 c := v_1.AuxInt 6379 v.reset(OpMIPS64SRLVconst) 6380 v.AuxInt = c 6381 v.AddArg(x) 6382 return true 6383 } 6384 return false 6385 } 6386 func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool { 6387 // match: (SRLVconst [c] (MOVVconst [d])) 6388 // cond: 6389 // result: (MOVVconst [int64(uint64(d)>>uint64(c))]) 6390 for { 6391 c := v.AuxInt 6392 v_0 := v.Args[0] 6393 if v_0.Op != OpMIPS64MOVVconst { 6394 break 6395 } 6396 d := v_0.AuxInt 6397 v.reset(OpMIPS64MOVVconst) 6398 v.AuxInt = int64(uint64(d) >> uint64(c)) 6399 return true 6400 } 6401 return false 6402 } 6403 func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool { 6404 // match: (SUBV x (MOVVconst [c])) 6405 // cond: is32Bit(c) 6406 // result: (SUBVconst [c] x) 6407 for { 6408 _ = v.Args[1] 6409 x := v.Args[0] 6410 v_1 := v.Args[1] 6411 if v_1.Op != OpMIPS64MOVVconst { 
6412 break 6413 } 6414 c := v_1.AuxInt 6415 if !(is32Bit(c)) { 6416 break 6417 } 6418 v.reset(OpMIPS64SUBVconst) 6419 v.AuxInt = c 6420 v.AddArg(x) 6421 return true 6422 } 6423 // match: (SUBV x x) 6424 // cond: 6425 // result: (MOVVconst [0]) 6426 for { 6427 _ = v.Args[1] 6428 x := v.Args[0] 6429 if x != v.Args[1] { 6430 break 6431 } 6432 v.reset(OpMIPS64MOVVconst) 6433 v.AuxInt = 0 6434 return true 6435 } 6436 // match: (SUBV (MOVVconst [0]) x) 6437 // cond: 6438 // result: (NEGV x) 6439 for { 6440 _ = v.Args[1] 6441 v_0 := v.Args[0] 6442 if v_0.Op != OpMIPS64MOVVconst { 6443 break 6444 } 6445 if v_0.AuxInt != 0 { 6446 break 6447 } 6448 x := v.Args[1] 6449 v.reset(OpMIPS64NEGV) 6450 v.AddArg(x) 6451 return true 6452 } 6453 return false 6454 } 6455 func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool { 6456 // match: (SUBVconst [0] x) 6457 // cond: 6458 // result: x 6459 for { 6460 if v.AuxInt != 0 { 6461 break 6462 } 6463 x := v.Args[0] 6464 v.reset(OpCopy) 6465 v.Type = x.Type 6466 v.AddArg(x) 6467 return true 6468 } 6469 // match: (SUBVconst [c] (MOVVconst [d])) 6470 // cond: 6471 // result: (MOVVconst [d-c]) 6472 for { 6473 c := v.AuxInt 6474 v_0 := v.Args[0] 6475 if v_0.Op != OpMIPS64MOVVconst { 6476 break 6477 } 6478 d := v_0.AuxInt 6479 v.reset(OpMIPS64MOVVconst) 6480 v.AuxInt = d - c 6481 return true 6482 } 6483 // match: (SUBVconst [c] (SUBVconst [d] x)) 6484 // cond: is32Bit(-c-d) 6485 // result: (ADDVconst [-c-d] x) 6486 for { 6487 c := v.AuxInt 6488 v_0 := v.Args[0] 6489 if v_0.Op != OpMIPS64SUBVconst { 6490 break 6491 } 6492 d := v_0.AuxInt 6493 x := v_0.Args[0] 6494 if !(is32Bit(-c - d)) { 6495 break 6496 } 6497 v.reset(OpMIPS64ADDVconst) 6498 v.AuxInt = -c - d 6499 v.AddArg(x) 6500 return true 6501 } 6502 // match: (SUBVconst [c] (ADDVconst [d] x)) 6503 // cond: is32Bit(-c+d) 6504 // result: (ADDVconst [-c+d] x) 6505 for { 6506 c := v.AuxInt 6507 v_0 := v.Args[0] 6508 if v_0.Op != OpMIPS64ADDVconst { 6509 break 6510 } 6511 d := v_0.AuxInt 6512 
x := v_0.Args[0] 6513 if !(is32Bit(-c + d)) { 6514 break 6515 } 6516 v.reset(OpMIPS64ADDVconst) 6517 v.AuxInt = -c + d 6518 v.AddArg(x) 6519 return true 6520 } 6521 return false 6522 } 6523 func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool { 6524 // match: (XOR x (MOVVconst [c])) 6525 // cond: is32Bit(c) 6526 // result: (XORconst [c] x) 6527 for { 6528 _ = v.Args[1] 6529 x := v.Args[0] 6530 v_1 := v.Args[1] 6531 if v_1.Op != OpMIPS64MOVVconst { 6532 break 6533 } 6534 c := v_1.AuxInt 6535 if !(is32Bit(c)) { 6536 break 6537 } 6538 v.reset(OpMIPS64XORconst) 6539 v.AuxInt = c 6540 v.AddArg(x) 6541 return true 6542 } 6543 // match: (XOR (MOVVconst [c]) x) 6544 // cond: is32Bit(c) 6545 // result: (XORconst [c] x) 6546 for { 6547 _ = v.Args[1] 6548 v_0 := v.Args[0] 6549 if v_0.Op != OpMIPS64MOVVconst { 6550 break 6551 } 6552 c := v_0.AuxInt 6553 x := v.Args[1] 6554 if !(is32Bit(c)) { 6555 break 6556 } 6557 v.reset(OpMIPS64XORconst) 6558 v.AuxInt = c 6559 v.AddArg(x) 6560 return true 6561 } 6562 // match: (XOR x x) 6563 // cond: 6564 // result: (MOVVconst [0]) 6565 for { 6566 _ = v.Args[1] 6567 x := v.Args[0] 6568 if x != v.Args[1] { 6569 break 6570 } 6571 v.reset(OpMIPS64MOVVconst) 6572 v.AuxInt = 0 6573 return true 6574 } 6575 return false 6576 } 6577 func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool { 6578 // match: (XORconst [0] x) 6579 // cond: 6580 // result: x 6581 for { 6582 if v.AuxInt != 0 { 6583 break 6584 } 6585 x := v.Args[0] 6586 v.reset(OpCopy) 6587 v.Type = x.Type 6588 v.AddArg(x) 6589 return true 6590 } 6591 // match: (XORconst [-1] x) 6592 // cond: 6593 // result: (NORconst [0] x) 6594 for { 6595 if v.AuxInt != -1 { 6596 break 6597 } 6598 x := v.Args[0] 6599 v.reset(OpMIPS64NORconst) 6600 v.AuxInt = 0 6601 v.AddArg(x) 6602 return true 6603 } 6604 // match: (XORconst [c] (MOVVconst [d])) 6605 // cond: 6606 // result: (MOVVconst [c^d]) 6607 for { 6608 c := v.AuxInt 6609 v_0 := v.Args[0] 6610 if v_0.Op != OpMIPS64MOVVconst { 6611 break 6612 } 6613 
d := v_0.AuxInt 6614 v.reset(OpMIPS64MOVVconst) 6615 v.AuxInt = c ^ d 6616 return true 6617 } 6618 // match: (XORconst [c] (XORconst [d] x)) 6619 // cond: is32Bit(c^d) 6620 // result: (XORconst [c^d] x) 6621 for { 6622 c := v.AuxInt 6623 v_0 := v.Args[0] 6624 if v_0.Op != OpMIPS64XORconst { 6625 break 6626 } 6627 d := v_0.AuxInt 6628 x := v_0.Args[0] 6629 if !(is32Bit(c ^ d)) { 6630 break 6631 } 6632 v.reset(OpMIPS64XORconst) 6633 v.AuxInt = c ^ d 6634 v.AddArg(x) 6635 return true 6636 } 6637 return false 6638 } 6639 func rewriteValueMIPS64_OpMod16_0(v *Value) bool { 6640 b := v.Block 6641 _ = b 6642 typ := &b.Func.Config.Types 6643 _ = typ 6644 // match: (Mod16 x y) 6645 // cond: 6646 // result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 6647 for { 6648 _ = v.Args[1] 6649 x := v.Args[0] 6650 y := v.Args[1] 6651 v.reset(OpSelect0) 6652 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6653 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6654 v1.AddArg(x) 6655 v0.AddArg(v1) 6656 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6657 v2.AddArg(y) 6658 v0.AddArg(v2) 6659 v.AddArg(v0) 6660 return true 6661 } 6662 } 6663 func rewriteValueMIPS64_OpMod16u_0(v *Value) bool { 6664 b := v.Block 6665 _ = b 6666 typ := &b.Func.Config.Types 6667 _ = typ 6668 // match: (Mod16u x y) 6669 // cond: 6670 // result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 6671 for { 6672 _ = v.Args[1] 6673 x := v.Args[0] 6674 y := v.Args[1] 6675 v.reset(OpSelect0) 6676 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6677 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6678 v1.AddArg(x) 6679 v0.AddArg(v1) 6680 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6681 v2.AddArg(y) 6682 v0.AddArg(v2) 6683 v.AddArg(v0) 6684 return true 6685 } 6686 } 6687 func rewriteValueMIPS64_OpMod32_0(v *Value) bool { 6688 b := v.Block 6689 _ = b 6690 typ := &b.Func.Config.Types 6691 _ = typ 6692 // match: (Mod32 x y) 
6693 // cond: 6694 // result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 6695 for { 6696 _ = v.Args[1] 6697 x := v.Args[0] 6698 y := v.Args[1] 6699 v.reset(OpSelect0) 6700 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6701 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6702 v1.AddArg(x) 6703 v0.AddArg(v1) 6704 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6705 v2.AddArg(y) 6706 v0.AddArg(v2) 6707 v.AddArg(v0) 6708 return true 6709 } 6710 } 6711 func rewriteValueMIPS64_OpMod32u_0(v *Value) bool { 6712 b := v.Block 6713 _ = b 6714 typ := &b.Func.Config.Types 6715 _ = typ 6716 // match: (Mod32u x y) 6717 // cond: 6718 // result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 6719 for { 6720 _ = v.Args[1] 6721 x := v.Args[0] 6722 y := v.Args[1] 6723 v.reset(OpSelect0) 6724 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6725 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6726 v1.AddArg(x) 6727 v0.AddArg(v1) 6728 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6729 v2.AddArg(y) 6730 v0.AddArg(v2) 6731 v.AddArg(v0) 6732 return true 6733 } 6734 } 6735 func rewriteValueMIPS64_OpMod64_0(v *Value) bool { 6736 b := v.Block 6737 _ = b 6738 typ := &b.Func.Config.Types 6739 _ = typ 6740 // match: (Mod64 x y) 6741 // cond: 6742 // result: (Select0 (DIVV x y)) 6743 for { 6744 _ = v.Args[1] 6745 x := v.Args[0] 6746 y := v.Args[1] 6747 v.reset(OpSelect0) 6748 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6749 v0.AddArg(x) 6750 v0.AddArg(y) 6751 v.AddArg(v0) 6752 return true 6753 } 6754 } 6755 func rewriteValueMIPS64_OpMod64u_0(v *Value) bool { 6756 b := v.Block 6757 _ = b 6758 typ := &b.Func.Config.Types 6759 _ = typ 6760 // match: (Mod64u x y) 6761 // cond: 6762 // result: (Select0 (DIVVU x y)) 6763 for { 6764 _ = v.Args[1] 6765 x := v.Args[0] 6766 y := v.Args[1] 6767 v.reset(OpSelect0) 6768 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, 
types.NewTuple(typ.UInt64, typ.UInt64)) 6769 v0.AddArg(x) 6770 v0.AddArg(y) 6771 v.AddArg(v0) 6772 return true 6773 } 6774 } 6775 func rewriteValueMIPS64_OpMod8_0(v *Value) bool { 6776 b := v.Block 6777 _ = b 6778 typ := &b.Func.Config.Types 6779 _ = typ 6780 // match: (Mod8 x y) 6781 // cond: 6782 // result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 6783 for { 6784 _ = v.Args[1] 6785 x := v.Args[0] 6786 y := v.Args[1] 6787 v.reset(OpSelect0) 6788 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6789 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6790 v1.AddArg(x) 6791 v0.AddArg(v1) 6792 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6793 v2.AddArg(y) 6794 v0.AddArg(v2) 6795 v.AddArg(v0) 6796 return true 6797 } 6798 } 6799 func rewriteValueMIPS64_OpMod8u_0(v *Value) bool { 6800 b := v.Block 6801 _ = b 6802 typ := &b.Func.Config.Types 6803 _ = typ 6804 // match: (Mod8u x y) 6805 // cond: 6806 // result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 6807 for { 6808 _ = v.Args[1] 6809 x := v.Args[0] 6810 y := v.Args[1] 6811 v.reset(OpSelect0) 6812 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6813 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6814 v1.AddArg(x) 6815 v0.AddArg(v1) 6816 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6817 v2.AddArg(y) 6818 v0.AddArg(v2) 6819 v.AddArg(v0) 6820 return true 6821 } 6822 } 6823 func rewriteValueMIPS64_OpMove_0(v *Value) bool { 6824 b := v.Block 6825 _ = b 6826 typ := &b.Func.Config.Types 6827 _ = typ 6828 // match: (Move [0] _ _ mem) 6829 // cond: 6830 // result: mem 6831 for { 6832 if v.AuxInt != 0 { 6833 break 6834 } 6835 _ = v.Args[2] 6836 mem := v.Args[2] 6837 v.reset(OpCopy) 6838 v.Type = mem.Type 6839 v.AddArg(mem) 6840 return true 6841 } 6842 // match: (Move [1] dst src mem) 6843 // cond: 6844 // result: (MOVBstore dst (MOVBload src mem) mem) 6845 for { 6846 if v.AuxInt != 1 { 6847 break 6848 } 6849 _ = v.Args[2] 6850 dst 
:= v.Args[0] 6851 src := v.Args[1] 6852 mem := v.Args[2] 6853 v.reset(OpMIPS64MOVBstore) 6854 v.AddArg(dst) 6855 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6856 v0.AddArg(src) 6857 v0.AddArg(mem) 6858 v.AddArg(v0) 6859 v.AddArg(mem) 6860 return true 6861 } 6862 // match: (Move [2] {t} dst src mem) 6863 // cond: t.(*types.Type).Alignment()%2 == 0 6864 // result: (MOVHstore dst (MOVHload src mem) mem) 6865 for { 6866 if v.AuxInt != 2 { 6867 break 6868 } 6869 t := v.Aux 6870 _ = v.Args[2] 6871 dst := v.Args[0] 6872 src := v.Args[1] 6873 mem := v.Args[2] 6874 if !(t.(*types.Type).Alignment()%2 == 0) { 6875 break 6876 } 6877 v.reset(OpMIPS64MOVHstore) 6878 v.AddArg(dst) 6879 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6880 v0.AddArg(src) 6881 v0.AddArg(mem) 6882 v.AddArg(v0) 6883 v.AddArg(mem) 6884 return true 6885 } 6886 // match: (Move [2] dst src mem) 6887 // cond: 6888 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)) 6889 for { 6890 if v.AuxInt != 2 { 6891 break 6892 } 6893 _ = v.Args[2] 6894 dst := v.Args[0] 6895 src := v.Args[1] 6896 mem := v.Args[2] 6897 v.reset(OpMIPS64MOVBstore) 6898 v.AuxInt = 1 6899 v.AddArg(dst) 6900 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6901 v0.AuxInt = 1 6902 v0.AddArg(src) 6903 v0.AddArg(mem) 6904 v.AddArg(v0) 6905 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6906 v1.AddArg(dst) 6907 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6908 v2.AddArg(src) 6909 v2.AddArg(mem) 6910 v1.AddArg(v2) 6911 v1.AddArg(mem) 6912 v.AddArg(v1) 6913 return true 6914 } 6915 // match: (Move [4] {t} dst src mem) 6916 // cond: t.(*types.Type).Alignment()%4 == 0 6917 // result: (MOVWstore dst (MOVWload src mem) mem) 6918 for { 6919 if v.AuxInt != 4 { 6920 break 6921 } 6922 t := v.Aux 6923 _ = v.Args[2] 6924 dst := v.Args[0] 6925 src := v.Args[1] 6926 mem := v.Args[2] 6927 if !(t.(*types.Type).Alignment()%4 == 0) { 6928 break 6929 } 6930 v.reset(OpMIPS64MOVWstore) 
6931 v.AddArg(dst) 6932 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6933 v0.AddArg(src) 6934 v0.AddArg(mem) 6935 v.AddArg(v0) 6936 v.AddArg(mem) 6937 return true 6938 } 6939 // match: (Move [4] {t} dst src mem) 6940 // cond: t.(*types.Type).Alignment()%2 == 0 6941 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)) 6942 for { 6943 if v.AuxInt != 4 { 6944 break 6945 } 6946 t := v.Aux 6947 _ = v.Args[2] 6948 dst := v.Args[0] 6949 src := v.Args[1] 6950 mem := v.Args[2] 6951 if !(t.(*types.Type).Alignment()%2 == 0) { 6952 break 6953 } 6954 v.reset(OpMIPS64MOVHstore) 6955 v.AuxInt = 2 6956 v.AddArg(dst) 6957 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6958 v0.AuxInt = 2 6959 v0.AddArg(src) 6960 v0.AddArg(mem) 6961 v.AddArg(v0) 6962 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6963 v1.AddArg(dst) 6964 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6965 v2.AddArg(src) 6966 v2.AddArg(mem) 6967 v1.AddArg(v2) 6968 v1.AddArg(mem) 6969 v.AddArg(v1) 6970 return true 6971 } 6972 // match: (Move [4] dst src mem) 6973 // cond: 6974 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))) 6975 for { 6976 if v.AuxInt != 4 { 6977 break 6978 } 6979 _ = v.Args[2] 6980 dst := v.Args[0] 6981 src := v.Args[1] 6982 mem := v.Args[2] 6983 v.reset(OpMIPS64MOVBstore) 6984 v.AuxInt = 3 6985 v.AddArg(dst) 6986 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6987 v0.AuxInt = 3 6988 v0.AddArg(src) 6989 v0.AddArg(mem) 6990 v.AddArg(v0) 6991 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6992 v1.AuxInt = 2 6993 v1.AddArg(dst) 6994 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6995 v2.AuxInt = 2 6996 v2.AddArg(src) 6997 v2.AddArg(mem) 6998 v1.AddArg(v2) 6999 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7000 v3.AuxInt = 1 7001 v3.AddArg(dst) 7002 v4 := 
b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7003 v4.AuxInt = 1 7004 v4.AddArg(src) 7005 v4.AddArg(mem) 7006 v3.AddArg(v4) 7007 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7008 v5.AddArg(dst) 7009 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7010 v6.AddArg(src) 7011 v6.AddArg(mem) 7012 v5.AddArg(v6) 7013 v5.AddArg(mem) 7014 v3.AddArg(v5) 7015 v1.AddArg(v3) 7016 v.AddArg(v1) 7017 return true 7018 } 7019 // match: (Move [8] {t} dst src mem) 7020 // cond: t.(*types.Type).Alignment()%8 == 0 7021 // result: (MOVVstore dst (MOVVload src mem) mem) 7022 for { 7023 if v.AuxInt != 8 { 7024 break 7025 } 7026 t := v.Aux 7027 _ = v.Args[2] 7028 dst := v.Args[0] 7029 src := v.Args[1] 7030 mem := v.Args[2] 7031 if !(t.(*types.Type).Alignment()%8 == 0) { 7032 break 7033 } 7034 v.reset(OpMIPS64MOVVstore) 7035 v.AddArg(dst) 7036 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7037 v0.AddArg(src) 7038 v0.AddArg(mem) 7039 v.AddArg(v0) 7040 v.AddArg(mem) 7041 return true 7042 } 7043 // match: (Move [8] {t} dst src mem) 7044 // cond: t.(*types.Type).Alignment()%4 == 0 7045 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 7046 for { 7047 if v.AuxInt != 8 { 7048 break 7049 } 7050 t := v.Aux 7051 _ = v.Args[2] 7052 dst := v.Args[0] 7053 src := v.Args[1] 7054 mem := v.Args[2] 7055 if !(t.(*types.Type).Alignment()%4 == 0) { 7056 break 7057 } 7058 v.reset(OpMIPS64MOVWstore) 7059 v.AuxInt = 4 7060 v.AddArg(dst) 7061 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7062 v0.AuxInt = 4 7063 v0.AddArg(src) 7064 v0.AddArg(mem) 7065 v.AddArg(v0) 7066 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7067 v1.AddArg(dst) 7068 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7069 v2.AddArg(src) 7070 v2.AddArg(mem) 7071 v1.AddArg(v2) 7072 v1.AddArg(mem) 7073 v.AddArg(v1) 7074 return true 7075 } 7076 // match: (Move [8] {t} dst src mem) 7077 // cond: t.(*types.Type).Alignment()%2 == 0 7078 // result: (MOVHstore 
[6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 7079 for { 7080 if v.AuxInt != 8 { 7081 break 7082 } 7083 t := v.Aux 7084 _ = v.Args[2] 7085 dst := v.Args[0] 7086 src := v.Args[1] 7087 mem := v.Args[2] 7088 if !(t.(*types.Type).Alignment()%2 == 0) { 7089 break 7090 } 7091 v.reset(OpMIPS64MOVHstore) 7092 v.AuxInt = 6 7093 v.AddArg(dst) 7094 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7095 v0.AuxInt = 6 7096 v0.AddArg(src) 7097 v0.AddArg(mem) 7098 v.AddArg(v0) 7099 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7100 v1.AuxInt = 4 7101 v1.AddArg(dst) 7102 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7103 v2.AuxInt = 4 7104 v2.AddArg(src) 7105 v2.AddArg(mem) 7106 v1.AddArg(v2) 7107 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7108 v3.AuxInt = 2 7109 v3.AddArg(dst) 7110 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7111 v4.AuxInt = 2 7112 v4.AddArg(src) 7113 v4.AddArg(mem) 7114 v3.AddArg(v4) 7115 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7116 v5.AddArg(dst) 7117 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7118 v6.AddArg(src) 7119 v6.AddArg(mem) 7120 v5.AddArg(v6) 7121 v5.AddArg(mem) 7122 v3.AddArg(v5) 7123 v1.AddArg(v3) 7124 v.AddArg(v1) 7125 return true 7126 } 7127 return false 7128 } 7129 func rewriteValueMIPS64_OpMove_10(v *Value) bool { 7130 b := v.Block 7131 _ = b 7132 config := b.Func.Config 7133 _ = config 7134 typ := &b.Func.Config.Types 7135 _ = typ 7136 // match: (Move [3] dst src mem) 7137 // cond: 7138 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))) 7139 for { 7140 if v.AuxInt != 3 { 7141 break 7142 } 7143 _ = v.Args[2] 7144 dst := v.Args[0] 7145 src := v.Args[1] 7146 mem := v.Args[2] 7147 v.reset(OpMIPS64MOVBstore) 7148 v.AuxInt = 2 7149 v.AddArg(dst) 7150 v0 := b.NewValue0(v.Pos, 
OpMIPS64MOVBload, typ.Int8) 7151 v0.AuxInt = 2 7152 v0.AddArg(src) 7153 v0.AddArg(mem) 7154 v.AddArg(v0) 7155 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7156 v1.AuxInt = 1 7157 v1.AddArg(dst) 7158 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7159 v2.AuxInt = 1 7160 v2.AddArg(src) 7161 v2.AddArg(mem) 7162 v1.AddArg(v2) 7163 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7164 v3.AddArg(dst) 7165 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7166 v4.AddArg(src) 7167 v4.AddArg(mem) 7168 v3.AddArg(v4) 7169 v3.AddArg(mem) 7170 v1.AddArg(v3) 7171 v.AddArg(v1) 7172 return true 7173 } 7174 // match: (Move [6] {t} dst src mem) 7175 // cond: t.(*types.Type).Alignment()%2 == 0 7176 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 7177 for { 7178 if v.AuxInt != 6 { 7179 break 7180 } 7181 t := v.Aux 7182 _ = v.Args[2] 7183 dst := v.Args[0] 7184 src := v.Args[1] 7185 mem := v.Args[2] 7186 if !(t.(*types.Type).Alignment()%2 == 0) { 7187 break 7188 } 7189 v.reset(OpMIPS64MOVHstore) 7190 v.AuxInt = 4 7191 v.AddArg(dst) 7192 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7193 v0.AuxInt = 4 7194 v0.AddArg(src) 7195 v0.AddArg(mem) 7196 v.AddArg(v0) 7197 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7198 v1.AuxInt = 2 7199 v1.AddArg(dst) 7200 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7201 v2.AuxInt = 2 7202 v2.AddArg(src) 7203 v2.AddArg(mem) 7204 v1.AddArg(v2) 7205 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7206 v3.AddArg(dst) 7207 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7208 v4.AddArg(src) 7209 v4.AddArg(mem) 7210 v3.AddArg(v4) 7211 v3.AddArg(mem) 7212 v1.AddArg(v3) 7213 v.AddArg(v1) 7214 return true 7215 } 7216 // match: (Move [12] {t} dst src mem) 7217 // cond: t.(*types.Type).Alignment()%4 == 0 7218 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) 
(MOVWstore dst (MOVWload src mem) mem))) 7219 for { 7220 if v.AuxInt != 12 { 7221 break 7222 } 7223 t := v.Aux 7224 _ = v.Args[2] 7225 dst := v.Args[0] 7226 src := v.Args[1] 7227 mem := v.Args[2] 7228 if !(t.(*types.Type).Alignment()%4 == 0) { 7229 break 7230 } 7231 v.reset(OpMIPS64MOVWstore) 7232 v.AuxInt = 8 7233 v.AddArg(dst) 7234 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7235 v0.AuxInt = 8 7236 v0.AddArg(src) 7237 v0.AddArg(mem) 7238 v.AddArg(v0) 7239 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7240 v1.AuxInt = 4 7241 v1.AddArg(dst) 7242 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7243 v2.AuxInt = 4 7244 v2.AddArg(src) 7245 v2.AddArg(mem) 7246 v1.AddArg(v2) 7247 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7248 v3.AddArg(dst) 7249 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7250 v4.AddArg(src) 7251 v4.AddArg(mem) 7252 v3.AddArg(v4) 7253 v3.AddArg(mem) 7254 v1.AddArg(v3) 7255 v.AddArg(v1) 7256 return true 7257 } 7258 // match: (Move [16] {t} dst src mem) 7259 // cond: t.(*types.Type).Alignment()%8 == 0 7260 // result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)) 7261 for { 7262 if v.AuxInt != 16 { 7263 break 7264 } 7265 t := v.Aux 7266 _ = v.Args[2] 7267 dst := v.Args[0] 7268 src := v.Args[1] 7269 mem := v.Args[2] 7270 if !(t.(*types.Type).Alignment()%8 == 0) { 7271 break 7272 } 7273 v.reset(OpMIPS64MOVVstore) 7274 v.AuxInt = 8 7275 v.AddArg(dst) 7276 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7277 v0.AuxInt = 8 7278 v0.AddArg(src) 7279 v0.AddArg(mem) 7280 v.AddArg(v0) 7281 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7282 v1.AddArg(dst) 7283 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7284 v2.AddArg(src) 7285 v2.AddArg(mem) 7286 v1.AddArg(v2) 7287 v1.AddArg(mem) 7288 v.AddArg(v1) 7289 return true 7290 } 7291 // match: (Move [24] {t} dst src mem) 7292 // cond: t.(*types.Type).Alignment()%8 == 0 7293 // result: (MOVVstore [16] dst 
(MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))) 7294 for { 7295 if v.AuxInt != 24 { 7296 break 7297 } 7298 t := v.Aux 7299 _ = v.Args[2] 7300 dst := v.Args[0] 7301 src := v.Args[1] 7302 mem := v.Args[2] 7303 if !(t.(*types.Type).Alignment()%8 == 0) { 7304 break 7305 } 7306 v.reset(OpMIPS64MOVVstore) 7307 v.AuxInt = 16 7308 v.AddArg(dst) 7309 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7310 v0.AuxInt = 16 7311 v0.AddArg(src) 7312 v0.AddArg(mem) 7313 v.AddArg(v0) 7314 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7315 v1.AuxInt = 8 7316 v1.AddArg(dst) 7317 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7318 v2.AuxInt = 8 7319 v2.AddArg(src) 7320 v2.AddArg(mem) 7321 v1.AddArg(v2) 7322 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7323 v3.AddArg(dst) 7324 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7325 v4.AddArg(src) 7326 v4.AddArg(mem) 7327 v3.AddArg(v4) 7328 v3.AddArg(mem) 7329 v1.AddArg(v3) 7330 v.AddArg(v1) 7331 return true 7332 } 7333 // match: (Move [s] {t} dst src mem) 7334 // cond: s > 24 || t.(*types.Type).Alignment()%8 != 0 7335 // result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 7336 for { 7337 s := v.AuxInt 7338 t := v.Aux 7339 _ = v.Args[2] 7340 dst := v.Args[0] 7341 src := v.Args[1] 7342 mem := v.Args[2] 7343 if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) { 7344 break 7345 } 7346 v.reset(OpMIPS64LoweredMove) 7347 v.AuxInt = t.(*types.Type).Alignment() 7348 v.AddArg(dst) 7349 v.AddArg(src) 7350 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type) 7351 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 7352 v0.AddArg(src) 7353 v.AddArg(v0) 7354 v.AddArg(mem) 7355 return true 7356 } 7357 return false 7358 } 7359 func rewriteValueMIPS64_OpMul16_0(v *Value) bool { 7360 b := v.Block 7361 _ = b 7362 typ := &b.Func.Config.Types 7363 _ = typ 
7364 // match: (Mul16 x y) 7365 // cond: 7366 // result: (Select1 (MULVU x y)) 7367 for { 7368 _ = v.Args[1] 7369 x := v.Args[0] 7370 y := v.Args[1] 7371 v.reset(OpSelect1) 7372 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7373 v0.AddArg(x) 7374 v0.AddArg(y) 7375 v.AddArg(v0) 7376 return true 7377 } 7378 } 7379 func rewriteValueMIPS64_OpMul32_0(v *Value) bool { 7380 b := v.Block 7381 _ = b 7382 typ := &b.Func.Config.Types 7383 _ = typ 7384 // match: (Mul32 x y) 7385 // cond: 7386 // result: (Select1 (MULVU x y)) 7387 for { 7388 _ = v.Args[1] 7389 x := v.Args[0] 7390 y := v.Args[1] 7391 v.reset(OpSelect1) 7392 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7393 v0.AddArg(x) 7394 v0.AddArg(y) 7395 v.AddArg(v0) 7396 return true 7397 } 7398 } 7399 func rewriteValueMIPS64_OpMul32F_0(v *Value) bool { 7400 // match: (Mul32F x y) 7401 // cond: 7402 // result: (MULF x y) 7403 for { 7404 _ = v.Args[1] 7405 x := v.Args[0] 7406 y := v.Args[1] 7407 v.reset(OpMIPS64MULF) 7408 v.AddArg(x) 7409 v.AddArg(y) 7410 return true 7411 } 7412 } 7413 func rewriteValueMIPS64_OpMul64_0(v *Value) bool { 7414 b := v.Block 7415 _ = b 7416 typ := &b.Func.Config.Types 7417 _ = typ 7418 // match: (Mul64 x y) 7419 // cond: 7420 // result: (Select1 (MULVU x y)) 7421 for { 7422 _ = v.Args[1] 7423 x := v.Args[0] 7424 y := v.Args[1] 7425 v.reset(OpSelect1) 7426 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7427 v0.AddArg(x) 7428 v0.AddArg(y) 7429 v.AddArg(v0) 7430 return true 7431 } 7432 } 7433 func rewriteValueMIPS64_OpMul64F_0(v *Value) bool { 7434 // match: (Mul64F x y) 7435 // cond: 7436 // result: (MULD x y) 7437 for { 7438 _ = v.Args[1] 7439 x := v.Args[0] 7440 y := v.Args[1] 7441 v.reset(OpMIPS64MULD) 7442 v.AddArg(x) 7443 v.AddArg(y) 7444 return true 7445 } 7446 } 7447 func rewriteValueMIPS64_OpMul8_0(v *Value) bool { 7448 b := v.Block 7449 _ = b 7450 typ := &b.Func.Config.Types 7451 _ = typ 
7452 // match: (Mul8 x y) 7453 // cond: 7454 // result: (Select1 (MULVU x y)) 7455 for { 7456 _ = v.Args[1] 7457 x := v.Args[0] 7458 y := v.Args[1] 7459 v.reset(OpSelect1) 7460 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7461 v0.AddArg(x) 7462 v0.AddArg(y) 7463 v.AddArg(v0) 7464 return true 7465 } 7466 } 7467 func rewriteValueMIPS64_OpNeg16_0(v *Value) bool { 7468 // match: (Neg16 x) 7469 // cond: 7470 // result: (NEGV x) 7471 for { 7472 x := v.Args[0] 7473 v.reset(OpMIPS64NEGV) 7474 v.AddArg(x) 7475 return true 7476 } 7477 } 7478 func rewriteValueMIPS64_OpNeg32_0(v *Value) bool { 7479 // match: (Neg32 x) 7480 // cond: 7481 // result: (NEGV x) 7482 for { 7483 x := v.Args[0] 7484 v.reset(OpMIPS64NEGV) 7485 v.AddArg(x) 7486 return true 7487 } 7488 } 7489 func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool { 7490 // match: (Neg32F x) 7491 // cond: 7492 // result: (NEGF x) 7493 for { 7494 x := v.Args[0] 7495 v.reset(OpMIPS64NEGF) 7496 v.AddArg(x) 7497 return true 7498 } 7499 } 7500 func rewriteValueMIPS64_OpNeg64_0(v *Value) bool { 7501 // match: (Neg64 x) 7502 // cond: 7503 // result: (NEGV x) 7504 for { 7505 x := v.Args[0] 7506 v.reset(OpMIPS64NEGV) 7507 v.AddArg(x) 7508 return true 7509 } 7510 } 7511 func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool { 7512 // match: (Neg64F x) 7513 // cond: 7514 // result: (NEGD x) 7515 for { 7516 x := v.Args[0] 7517 v.reset(OpMIPS64NEGD) 7518 v.AddArg(x) 7519 return true 7520 } 7521 } 7522 func rewriteValueMIPS64_OpNeg8_0(v *Value) bool { 7523 // match: (Neg8 x) 7524 // cond: 7525 // result: (NEGV x) 7526 for { 7527 x := v.Args[0] 7528 v.reset(OpMIPS64NEGV) 7529 v.AddArg(x) 7530 return true 7531 } 7532 } 7533 func rewriteValueMIPS64_OpNeq16_0(v *Value) bool { 7534 b := v.Block 7535 _ = b 7536 typ := &b.Func.Config.Types 7537 _ = typ 7538 // match: (Neq16 x y) 7539 // cond: 7540 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0])) 7541 for { 7542 _ = v.Args[1] 7543 x := 
v.Args[0] 7544 y := v.Args[1] 7545 v.reset(OpMIPS64SGTU) 7546 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7547 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7548 v1.AddArg(x) 7549 v0.AddArg(v1) 7550 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7551 v2.AddArg(y) 7552 v0.AddArg(v2) 7553 v.AddArg(v0) 7554 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7555 v3.AuxInt = 0 7556 v.AddArg(v3) 7557 return true 7558 } 7559 } 7560 func rewriteValueMIPS64_OpNeq32_0(v *Value) bool { 7561 b := v.Block 7562 _ = b 7563 typ := &b.Func.Config.Types 7564 _ = typ 7565 // match: (Neq32 x y) 7566 // cond: 7567 // result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0])) 7568 for { 7569 _ = v.Args[1] 7570 x := v.Args[0] 7571 y := v.Args[1] 7572 v.reset(OpMIPS64SGTU) 7573 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7574 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7575 v1.AddArg(x) 7576 v0.AddArg(v1) 7577 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7578 v2.AddArg(y) 7579 v0.AddArg(v2) 7580 v.AddArg(v0) 7581 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7582 v3.AuxInt = 0 7583 v.AddArg(v3) 7584 return true 7585 } 7586 } 7587 func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool { 7588 b := v.Block 7589 _ = b 7590 // match: (Neq32F x y) 7591 // cond: 7592 // result: (FPFlagFalse (CMPEQF x y)) 7593 for { 7594 _ = v.Args[1] 7595 x := v.Args[0] 7596 y := v.Args[1] 7597 v.reset(OpMIPS64FPFlagFalse) 7598 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags) 7599 v0.AddArg(x) 7600 v0.AddArg(y) 7601 v.AddArg(v0) 7602 return true 7603 } 7604 } 7605 func rewriteValueMIPS64_OpNeq64_0(v *Value) bool { 7606 b := v.Block 7607 _ = b 7608 typ := &b.Func.Config.Types 7609 _ = typ 7610 // match: (Neq64 x y) 7611 // cond: 7612 // result: (SGTU (XOR x y) (MOVVconst [0])) 7613 for { 7614 _ = v.Args[1] 7615 x := v.Args[0] 7616 y := v.Args[1] 7617 v.reset(OpMIPS64SGTU) 7618 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7619 
v0.AddArg(x) 7620 v0.AddArg(y) 7621 v.AddArg(v0) 7622 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7623 v1.AuxInt = 0 7624 v.AddArg(v1) 7625 return true 7626 } 7627 } 7628 func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool { 7629 b := v.Block 7630 _ = b 7631 // match: (Neq64F x y) 7632 // cond: 7633 // result: (FPFlagFalse (CMPEQD x y)) 7634 for { 7635 _ = v.Args[1] 7636 x := v.Args[0] 7637 y := v.Args[1] 7638 v.reset(OpMIPS64FPFlagFalse) 7639 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags) 7640 v0.AddArg(x) 7641 v0.AddArg(y) 7642 v.AddArg(v0) 7643 return true 7644 } 7645 } 7646 func rewriteValueMIPS64_OpNeq8_0(v *Value) bool { 7647 b := v.Block 7648 _ = b 7649 typ := &b.Func.Config.Types 7650 _ = typ 7651 // match: (Neq8 x y) 7652 // cond: 7653 // result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0])) 7654 for { 7655 _ = v.Args[1] 7656 x := v.Args[0] 7657 y := v.Args[1] 7658 v.reset(OpMIPS64SGTU) 7659 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7660 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7661 v1.AddArg(x) 7662 v0.AddArg(v1) 7663 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7664 v2.AddArg(y) 7665 v0.AddArg(v2) 7666 v.AddArg(v0) 7667 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7668 v3.AuxInt = 0 7669 v.AddArg(v3) 7670 return true 7671 } 7672 } 7673 func rewriteValueMIPS64_OpNeqB_0(v *Value) bool { 7674 // match: (NeqB x y) 7675 // cond: 7676 // result: (XOR x y) 7677 for { 7678 _ = v.Args[1] 7679 x := v.Args[0] 7680 y := v.Args[1] 7681 v.reset(OpMIPS64XOR) 7682 v.AddArg(x) 7683 v.AddArg(y) 7684 return true 7685 } 7686 } 7687 func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool { 7688 b := v.Block 7689 _ = b 7690 typ := &b.Func.Config.Types 7691 _ = typ 7692 // match: (NeqPtr x y) 7693 // cond: 7694 // result: (SGTU (XOR x y) (MOVVconst [0])) 7695 for { 7696 _ = v.Args[1] 7697 x := v.Args[0] 7698 y := v.Args[1] 7699 v.reset(OpMIPS64SGTU) 7700 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 
7701 v0.AddArg(x) 7702 v0.AddArg(y) 7703 v.AddArg(v0) 7704 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7705 v1.AuxInt = 0 7706 v.AddArg(v1) 7707 return true 7708 } 7709 } 7710 func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool { 7711 // match: (NilCheck ptr mem) 7712 // cond: 7713 // result: (LoweredNilCheck ptr mem) 7714 for { 7715 _ = v.Args[1] 7716 ptr := v.Args[0] 7717 mem := v.Args[1] 7718 v.reset(OpMIPS64LoweredNilCheck) 7719 v.AddArg(ptr) 7720 v.AddArg(mem) 7721 return true 7722 } 7723 } 7724 func rewriteValueMIPS64_OpNot_0(v *Value) bool { 7725 // match: (Not x) 7726 // cond: 7727 // result: (XORconst [1] x) 7728 for { 7729 x := v.Args[0] 7730 v.reset(OpMIPS64XORconst) 7731 v.AuxInt = 1 7732 v.AddArg(x) 7733 return true 7734 } 7735 } 7736 func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool { 7737 // match: (OffPtr [off] ptr:(SP)) 7738 // cond: 7739 // result: (MOVVaddr [off] ptr) 7740 for { 7741 off := v.AuxInt 7742 ptr := v.Args[0] 7743 if ptr.Op != OpSP { 7744 break 7745 } 7746 v.reset(OpMIPS64MOVVaddr) 7747 v.AuxInt = off 7748 v.AddArg(ptr) 7749 return true 7750 } 7751 // match: (OffPtr [off] ptr) 7752 // cond: 7753 // result: (ADDVconst [off] ptr) 7754 for { 7755 off := v.AuxInt 7756 ptr := v.Args[0] 7757 v.reset(OpMIPS64ADDVconst) 7758 v.AuxInt = off 7759 v.AddArg(ptr) 7760 return true 7761 } 7762 } 7763 func rewriteValueMIPS64_OpOr16_0(v *Value) bool { 7764 // match: (Or16 x y) 7765 // cond: 7766 // result: (OR x y) 7767 for { 7768 _ = v.Args[1] 7769 x := v.Args[0] 7770 y := v.Args[1] 7771 v.reset(OpMIPS64OR) 7772 v.AddArg(x) 7773 v.AddArg(y) 7774 return true 7775 } 7776 } 7777 func rewriteValueMIPS64_OpOr32_0(v *Value) bool { 7778 // match: (Or32 x y) 7779 // cond: 7780 // result: (OR x y) 7781 for { 7782 _ = v.Args[1] 7783 x := v.Args[0] 7784 y := v.Args[1] 7785 v.reset(OpMIPS64OR) 7786 v.AddArg(x) 7787 v.AddArg(y) 7788 return true 7789 } 7790 } 7791 func rewriteValueMIPS64_OpOr64_0(v *Value) bool { 7792 // match: (Or64 x y) 7793 // cond: 
7794 // result: (OR x y) 7795 for { 7796 _ = v.Args[1] 7797 x := v.Args[0] 7798 y := v.Args[1] 7799 v.reset(OpMIPS64OR) 7800 v.AddArg(x) 7801 v.AddArg(y) 7802 return true 7803 } 7804 } 7805 func rewriteValueMIPS64_OpOr8_0(v *Value) bool { 7806 // match: (Or8 x y) 7807 // cond: 7808 // result: (OR x y) 7809 for { 7810 _ = v.Args[1] 7811 x := v.Args[0] 7812 y := v.Args[1] 7813 v.reset(OpMIPS64OR) 7814 v.AddArg(x) 7815 v.AddArg(y) 7816 return true 7817 } 7818 } 7819 func rewriteValueMIPS64_OpOrB_0(v *Value) bool { 7820 // match: (OrB x y) 7821 // cond: 7822 // result: (OR x y) 7823 for { 7824 _ = v.Args[1] 7825 x := v.Args[0] 7826 y := v.Args[1] 7827 v.reset(OpMIPS64OR) 7828 v.AddArg(x) 7829 v.AddArg(y) 7830 return true 7831 } 7832 } 7833 func rewriteValueMIPS64_OpRound32F_0(v *Value) bool { 7834 // match: (Round32F x) 7835 // cond: 7836 // result: x 7837 for { 7838 x := v.Args[0] 7839 v.reset(OpCopy) 7840 v.Type = x.Type 7841 v.AddArg(x) 7842 return true 7843 } 7844 } 7845 func rewriteValueMIPS64_OpRound64F_0(v *Value) bool { 7846 // match: (Round64F x) 7847 // cond: 7848 // result: x 7849 for { 7850 x := v.Args[0] 7851 v.reset(OpCopy) 7852 v.Type = x.Type 7853 v.AddArg(x) 7854 return true 7855 } 7856 } 7857 func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool { 7858 b := v.Block 7859 _ = b 7860 typ := &b.Func.Config.Types 7861 _ = typ 7862 // match: (Rsh16Ux16 <t> x y) 7863 // cond: 7864 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y))) 7865 for { 7866 t := v.Type 7867 _ = v.Args[1] 7868 x := v.Args[0] 7869 y := v.Args[1] 7870 v.reset(OpMIPS64AND) 7871 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7872 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7873 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7874 v2.AuxInt = 64 7875 v1.AddArg(v2) 7876 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7877 v3.AddArg(y) 7878 v1.AddArg(v3) 7879 v0.AddArg(v1) 7880 v.AddArg(v0) 7881 v4 := 
b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7882 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7883 v5.AddArg(x) 7884 v4.AddArg(v5) 7885 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7886 v6.AddArg(y) 7887 v4.AddArg(v6) 7888 v.AddArg(v4) 7889 return true 7890 } 7891 } 7892 func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool { 7893 b := v.Block 7894 _ = b 7895 typ := &b.Func.Config.Types 7896 _ = typ 7897 // match: (Rsh16Ux32 <t> x y) 7898 // cond: 7899 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y))) 7900 for { 7901 t := v.Type 7902 _ = v.Args[1] 7903 x := v.Args[0] 7904 y := v.Args[1] 7905 v.reset(OpMIPS64AND) 7906 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7907 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7908 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7909 v2.AuxInt = 64 7910 v1.AddArg(v2) 7911 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7912 v3.AddArg(y) 7913 v1.AddArg(v3) 7914 v0.AddArg(v1) 7915 v.AddArg(v0) 7916 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7917 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7918 v5.AddArg(x) 7919 v4.AddArg(v5) 7920 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7921 v6.AddArg(y) 7922 v4.AddArg(v6) 7923 v.AddArg(v4) 7924 return true 7925 } 7926 } 7927 func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool { 7928 b := v.Block 7929 _ = b 7930 typ := &b.Func.Config.Types 7931 _ = typ 7932 // match: (Rsh16Ux64 <t> x y) 7933 // cond: 7934 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y)) 7935 for { 7936 t := v.Type 7937 _ = v.Args[1] 7938 x := v.Args[0] 7939 y := v.Args[1] 7940 v.reset(OpMIPS64AND) 7941 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7942 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7943 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7944 v2.AuxInt = 64 7945 v1.AddArg(v2) 7946 v1.AddArg(y) 7947 v0.AddArg(v1) 7948 v.AddArg(v0) 7949 v3 := 
b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7950 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7951 v4.AddArg(x) 7952 v3.AddArg(v4) 7953 v3.AddArg(y) 7954 v.AddArg(v3) 7955 return true 7956 } 7957 } 7958 func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool { 7959 b := v.Block 7960 _ = b 7961 typ := &b.Func.Config.Types 7962 _ = typ 7963 // match: (Rsh16Ux8 <t> x y) 7964 // cond: 7965 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y))) 7966 for { 7967 t := v.Type 7968 _ = v.Args[1] 7969 x := v.Args[0] 7970 y := v.Args[1] 7971 v.reset(OpMIPS64AND) 7972 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7973 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7974 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7975 v2.AuxInt = 64 7976 v1.AddArg(v2) 7977 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7978 v3.AddArg(y) 7979 v1.AddArg(v3) 7980 v0.AddArg(v1) 7981 v.AddArg(v0) 7982 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7983 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7984 v5.AddArg(x) 7985 v4.AddArg(v5) 7986 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7987 v6.AddArg(y) 7988 v4.AddArg(v6) 7989 v.AddArg(v4) 7990 return true 7991 } 7992 } 7993 func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool { 7994 b := v.Block 7995 _ = b 7996 typ := &b.Func.Config.Types 7997 _ = typ 7998 // match: (Rsh16x16 <t> x y) 7999 // cond: 8000 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8001 for { 8002 t := v.Type 8003 _ = v.Args[1] 8004 x := v.Args[0] 8005 y := v.Args[1] 8006 v.reset(OpMIPS64SRAV) 8007 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8008 v0.AddArg(x) 8009 v.AddArg(v0) 8010 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8011 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8012 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8013 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8014 v4.AddArg(y) 8015 
v3.AddArg(v4) 8016 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8017 v5.AuxInt = 63 8018 v3.AddArg(v5) 8019 v2.AddArg(v3) 8020 v1.AddArg(v2) 8021 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8022 v6.AddArg(y) 8023 v1.AddArg(v6) 8024 v.AddArg(v1) 8025 return true 8026 } 8027 } 8028 func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool { 8029 b := v.Block 8030 _ = b 8031 typ := &b.Func.Config.Types 8032 _ = typ 8033 // match: (Rsh16x32 <t> x y) 8034 // cond: 8035 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8036 for { 8037 t := v.Type 8038 _ = v.Args[1] 8039 x := v.Args[0] 8040 y := v.Args[1] 8041 v.reset(OpMIPS64SRAV) 8042 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8043 v0.AddArg(x) 8044 v.AddArg(v0) 8045 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8046 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8047 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8048 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8049 v4.AddArg(y) 8050 v3.AddArg(v4) 8051 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8052 v5.AuxInt = 63 8053 v3.AddArg(v5) 8054 v2.AddArg(v3) 8055 v1.AddArg(v2) 8056 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8057 v6.AddArg(y) 8058 v1.AddArg(v6) 8059 v.AddArg(v1) 8060 return true 8061 } 8062 } 8063 func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool { 8064 b := v.Block 8065 _ = b 8066 typ := &b.Func.Config.Types 8067 _ = typ 8068 // match: (Rsh16x64 <t> x y) 8069 // cond: 8070 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8071 for { 8072 t := v.Type 8073 _ = v.Args[1] 8074 x := v.Args[0] 8075 y := v.Args[1] 8076 v.reset(OpMIPS64SRAV) 8077 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8078 v0.AddArg(x) 8079 v.AddArg(v0) 8080 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8081 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8082 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8083 
v3.AddArg(y) 8084 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8085 v4.AuxInt = 63 8086 v3.AddArg(v4) 8087 v2.AddArg(v3) 8088 v1.AddArg(v2) 8089 v1.AddArg(y) 8090 v.AddArg(v1) 8091 return true 8092 } 8093 } 8094 func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool { 8095 b := v.Block 8096 _ = b 8097 typ := &b.Func.Config.Types 8098 _ = typ 8099 // match: (Rsh16x8 <t> x y) 8100 // cond: 8101 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8102 for { 8103 t := v.Type 8104 _ = v.Args[1] 8105 x := v.Args[0] 8106 y := v.Args[1] 8107 v.reset(OpMIPS64SRAV) 8108 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8109 v0.AddArg(x) 8110 v.AddArg(v0) 8111 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8112 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8113 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8114 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8115 v4.AddArg(y) 8116 v3.AddArg(v4) 8117 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8118 v5.AuxInt = 63 8119 v3.AddArg(v5) 8120 v2.AddArg(v3) 8121 v1.AddArg(v2) 8122 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8123 v6.AddArg(y) 8124 v1.AddArg(v6) 8125 v.AddArg(v1) 8126 return true 8127 } 8128 } 8129 func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool { 8130 b := v.Block 8131 _ = b 8132 typ := &b.Func.Config.Types 8133 _ = typ 8134 // match: (Rsh32Ux16 <t> x y) 8135 // cond: 8136 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y))) 8137 for { 8138 t := v.Type 8139 _ = v.Args[1] 8140 x := v.Args[0] 8141 y := v.Args[1] 8142 v.reset(OpMIPS64AND) 8143 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8144 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8145 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8146 v2.AuxInt = 64 8147 v1.AddArg(v2) 8148 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8149 v3.AddArg(y) 8150 v1.AddArg(v3) 8151 
v0.AddArg(v1) 8152 v.AddArg(v0) 8153 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8154 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8155 v5.AddArg(x) 8156 v4.AddArg(v5) 8157 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8158 v6.AddArg(y) 8159 v4.AddArg(v6) 8160 v.AddArg(v4) 8161 return true 8162 } 8163 } 8164 func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool { 8165 b := v.Block 8166 _ = b 8167 typ := &b.Func.Config.Types 8168 _ = typ 8169 // match: (Rsh32Ux32 <t> x y) 8170 // cond: 8171 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y))) 8172 for { 8173 t := v.Type 8174 _ = v.Args[1] 8175 x := v.Args[0] 8176 y := v.Args[1] 8177 v.reset(OpMIPS64AND) 8178 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8179 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8180 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8181 v2.AuxInt = 64 8182 v1.AddArg(v2) 8183 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8184 v3.AddArg(y) 8185 v1.AddArg(v3) 8186 v0.AddArg(v1) 8187 v.AddArg(v0) 8188 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8189 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8190 v5.AddArg(x) 8191 v4.AddArg(v5) 8192 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8193 v6.AddArg(y) 8194 v4.AddArg(v6) 8195 v.AddArg(v4) 8196 return true 8197 } 8198 } 8199 func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool { 8200 b := v.Block 8201 _ = b 8202 typ := &b.Func.Config.Types 8203 _ = typ 8204 // match: (Rsh32Ux64 <t> x y) 8205 // cond: 8206 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y)) 8207 for { 8208 t := v.Type 8209 _ = v.Args[1] 8210 x := v.Args[0] 8211 y := v.Args[1] 8212 v.reset(OpMIPS64AND) 8213 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8214 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8215 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8216 v2.AuxInt = 64 8217 v1.AddArg(v2) 8218 v1.AddArg(y) 8219 
v0.AddArg(v1) 8220 v.AddArg(v0) 8221 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8222 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8223 v4.AddArg(x) 8224 v3.AddArg(v4) 8225 v3.AddArg(y) 8226 v.AddArg(v3) 8227 return true 8228 } 8229 } 8230 func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool { 8231 b := v.Block 8232 _ = b 8233 typ := &b.Func.Config.Types 8234 _ = typ 8235 // match: (Rsh32Ux8 <t> x y) 8236 // cond: 8237 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y))) 8238 for { 8239 t := v.Type 8240 _ = v.Args[1] 8241 x := v.Args[0] 8242 y := v.Args[1] 8243 v.reset(OpMIPS64AND) 8244 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8245 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8246 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8247 v2.AuxInt = 64 8248 v1.AddArg(v2) 8249 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8250 v3.AddArg(y) 8251 v1.AddArg(v3) 8252 v0.AddArg(v1) 8253 v.AddArg(v0) 8254 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8255 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8256 v5.AddArg(x) 8257 v4.AddArg(v5) 8258 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8259 v6.AddArg(y) 8260 v4.AddArg(v6) 8261 v.AddArg(v4) 8262 return true 8263 } 8264 } 8265 func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool { 8266 b := v.Block 8267 _ = b 8268 typ := &b.Func.Config.Types 8269 _ = typ 8270 // match: (Rsh32x16 <t> x y) 8271 // cond: 8272 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8273 for { 8274 t := v.Type 8275 _ = v.Args[1] 8276 x := v.Args[0] 8277 y := v.Args[1] 8278 v.reset(OpMIPS64SRAV) 8279 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8280 v0.AddArg(x) 8281 v.AddArg(v0) 8282 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8283 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8284 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8285 v4 := b.NewValue0(v.Pos, 
OpZeroExt16to64, typ.UInt64) 8286 v4.AddArg(y) 8287 v3.AddArg(v4) 8288 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8289 v5.AuxInt = 63 8290 v3.AddArg(v5) 8291 v2.AddArg(v3) 8292 v1.AddArg(v2) 8293 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8294 v6.AddArg(y) 8295 v1.AddArg(v6) 8296 v.AddArg(v1) 8297 return true 8298 } 8299 } 8300 func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool { 8301 b := v.Block 8302 _ = b 8303 typ := &b.Func.Config.Types 8304 _ = typ 8305 // match: (Rsh32x32 <t> x y) 8306 // cond: 8307 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8308 for { 8309 t := v.Type 8310 _ = v.Args[1] 8311 x := v.Args[0] 8312 y := v.Args[1] 8313 v.reset(OpMIPS64SRAV) 8314 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8315 v0.AddArg(x) 8316 v.AddArg(v0) 8317 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8318 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8319 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8320 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8321 v4.AddArg(y) 8322 v3.AddArg(v4) 8323 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8324 v5.AuxInt = 63 8325 v3.AddArg(v5) 8326 v2.AddArg(v3) 8327 v1.AddArg(v2) 8328 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8329 v6.AddArg(y) 8330 v1.AddArg(v6) 8331 v.AddArg(v1) 8332 return true 8333 } 8334 } 8335 func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool { 8336 b := v.Block 8337 _ = b 8338 typ := &b.Func.Config.Types 8339 _ = typ 8340 // match: (Rsh32x64 <t> x y) 8341 // cond: 8342 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8343 for { 8344 t := v.Type 8345 _ = v.Args[1] 8346 x := v.Args[0] 8347 y := v.Args[1] 8348 v.reset(OpMIPS64SRAV) 8349 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8350 v0.AddArg(x) 8351 v.AddArg(v0) 8352 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8353 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8354 v3 := 
b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8355 v3.AddArg(y) 8356 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8357 v4.AuxInt = 63 8358 v3.AddArg(v4) 8359 v2.AddArg(v3) 8360 v1.AddArg(v2) 8361 v1.AddArg(y) 8362 v.AddArg(v1) 8363 return true 8364 } 8365 } 8366 func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool { 8367 b := v.Block 8368 _ = b 8369 typ := &b.Func.Config.Types 8370 _ = typ 8371 // match: (Rsh32x8 <t> x y) 8372 // cond: 8373 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8374 for { 8375 t := v.Type 8376 _ = v.Args[1] 8377 x := v.Args[0] 8378 y := v.Args[1] 8379 v.reset(OpMIPS64SRAV) 8380 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8381 v0.AddArg(x) 8382 v.AddArg(v0) 8383 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8384 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8385 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8386 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8387 v4.AddArg(y) 8388 v3.AddArg(v4) 8389 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8390 v5.AuxInt = 63 8391 v3.AddArg(v5) 8392 v2.AddArg(v3) 8393 v1.AddArg(v2) 8394 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8395 v6.AddArg(y) 8396 v1.AddArg(v6) 8397 v.AddArg(v1) 8398 return true 8399 } 8400 } 8401 func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool { 8402 b := v.Block 8403 _ = b 8404 typ := &b.Func.Config.Types 8405 _ = typ 8406 // match: (Rsh64Ux16 <t> x y) 8407 // cond: 8408 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y))) 8409 for { 8410 t := v.Type 8411 _ = v.Args[1] 8412 x := v.Args[0] 8413 y := v.Args[1] 8414 v.reset(OpMIPS64AND) 8415 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8416 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8417 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8418 v2.AuxInt = 64 8419 v1.AddArg(v2) 8420 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8421 
v3.AddArg(y) 8422 v1.AddArg(v3) 8423 v0.AddArg(v1) 8424 v.AddArg(v0) 8425 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8426 v4.AddArg(x) 8427 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8428 v5.AddArg(y) 8429 v4.AddArg(v5) 8430 v.AddArg(v4) 8431 return true 8432 } 8433 } 8434 func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool { 8435 b := v.Block 8436 _ = b 8437 typ := &b.Func.Config.Types 8438 _ = typ 8439 // match: (Rsh64Ux32 <t> x y) 8440 // cond: 8441 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y))) 8442 for { 8443 t := v.Type 8444 _ = v.Args[1] 8445 x := v.Args[0] 8446 y := v.Args[1] 8447 v.reset(OpMIPS64AND) 8448 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8449 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8450 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8451 v2.AuxInt = 64 8452 v1.AddArg(v2) 8453 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8454 v3.AddArg(y) 8455 v1.AddArg(v3) 8456 v0.AddArg(v1) 8457 v.AddArg(v0) 8458 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8459 v4.AddArg(x) 8460 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8461 v5.AddArg(y) 8462 v4.AddArg(v5) 8463 v.AddArg(v4) 8464 return true 8465 } 8466 } 8467 func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool { 8468 b := v.Block 8469 _ = b 8470 typ := &b.Func.Config.Types 8471 _ = typ 8472 // match: (Rsh64Ux64 <t> x y) 8473 // cond: 8474 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y)) 8475 for { 8476 t := v.Type 8477 _ = v.Args[1] 8478 x := v.Args[0] 8479 y := v.Args[1] 8480 v.reset(OpMIPS64AND) 8481 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8482 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8483 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8484 v2.AuxInt = 64 8485 v1.AddArg(v2) 8486 v1.AddArg(y) 8487 v0.AddArg(v1) 8488 v.AddArg(v0) 8489 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8490 v3.AddArg(x) 8491 v3.AddArg(y) 8492 v.AddArg(v3) 8493 return true 
8494 } 8495 } 8496 func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool { 8497 b := v.Block 8498 _ = b 8499 typ := &b.Func.Config.Types 8500 _ = typ 8501 // match: (Rsh64Ux8 <t> x y) 8502 // cond: 8503 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y))) 8504 for { 8505 t := v.Type 8506 _ = v.Args[1] 8507 x := v.Args[0] 8508 y := v.Args[1] 8509 v.reset(OpMIPS64AND) 8510 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8511 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8512 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8513 v2.AuxInt = 64 8514 v1.AddArg(v2) 8515 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8516 v3.AddArg(y) 8517 v1.AddArg(v3) 8518 v0.AddArg(v1) 8519 v.AddArg(v0) 8520 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8521 v4.AddArg(x) 8522 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8523 v5.AddArg(y) 8524 v4.AddArg(v5) 8525 v.AddArg(v4) 8526 return true 8527 } 8528 } 8529 func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool { 8530 b := v.Block 8531 _ = b 8532 typ := &b.Func.Config.Types 8533 _ = typ 8534 // match: (Rsh64x16 <t> x y) 8535 // cond: 8536 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8537 for { 8538 t := v.Type 8539 _ = v.Args[1] 8540 x := v.Args[0] 8541 y := v.Args[1] 8542 v.reset(OpMIPS64SRAV) 8543 v.AddArg(x) 8544 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8545 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8546 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8547 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8548 v3.AddArg(y) 8549 v2.AddArg(v3) 8550 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8551 v4.AuxInt = 63 8552 v2.AddArg(v4) 8553 v1.AddArg(v2) 8554 v0.AddArg(v1) 8555 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8556 v5.AddArg(y) 8557 v0.AddArg(v5) 8558 v.AddArg(v0) 8559 return true 8560 } 8561 } 8562 func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool { 8563 b := 
v.Block 8564 _ = b 8565 typ := &b.Func.Config.Types 8566 _ = typ 8567 // match: (Rsh64x32 <t> x y) 8568 // cond: 8569 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8570 for { 8571 t := v.Type 8572 _ = v.Args[1] 8573 x := v.Args[0] 8574 y := v.Args[1] 8575 v.reset(OpMIPS64SRAV) 8576 v.AddArg(x) 8577 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8578 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8579 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8580 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8581 v3.AddArg(y) 8582 v2.AddArg(v3) 8583 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8584 v4.AuxInt = 63 8585 v2.AddArg(v4) 8586 v1.AddArg(v2) 8587 v0.AddArg(v1) 8588 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8589 v5.AddArg(y) 8590 v0.AddArg(v5) 8591 v.AddArg(v0) 8592 return true 8593 } 8594 } 8595 func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool { 8596 b := v.Block 8597 _ = b 8598 typ := &b.Func.Config.Types 8599 _ = typ 8600 // match: (Rsh64x64 <t> x y) 8601 // cond: 8602 // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8603 for { 8604 t := v.Type 8605 _ = v.Args[1] 8606 x := v.Args[0] 8607 y := v.Args[1] 8608 v.reset(OpMIPS64SRAV) 8609 v.AddArg(x) 8610 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8611 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8612 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8613 v2.AddArg(y) 8614 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8615 v3.AuxInt = 63 8616 v2.AddArg(v3) 8617 v1.AddArg(v2) 8618 v0.AddArg(v1) 8619 v0.AddArg(y) 8620 v.AddArg(v0) 8621 return true 8622 } 8623 } 8624 func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool { 8625 b := v.Block 8626 _ = b 8627 typ := &b.Func.Config.Types 8628 _ = typ 8629 // match: (Rsh64x8 <t> x y) 8630 // cond: 8631 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8632 for { 8633 t := v.Type 8634 _ = v.Args[1] 
8635 x := v.Args[0] 8636 y := v.Args[1] 8637 v.reset(OpMIPS64SRAV) 8638 v.AddArg(x) 8639 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8640 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8641 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8642 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8643 v3.AddArg(y) 8644 v2.AddArg(v3) 8645 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8646 v4.AuxInt = 63 8647 v2.AddArg(v4) 8648 v1.AddArg(v2) 8649 v0.AddArg(v1) 8650 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8651 v5.AddArg(y) 8652 v0.AddArg(v5) 8653 v.AddArg(v0) 8654 return true 8655 } 8656 } 8657 func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool { 8658 b := v.Block 8659 _ = b 8660 typ := &b.Func.Config.Types 8661 _ = typ 8662 // match: (Rsh8Ux16 <t> x y) 8663 // cond: 8664 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y))) 8665 for { 8666 t := v.Type 8667 _ = v.Args[1] 8668 x := v.Args[0] 8669 y := v.Args[1] 8670 v.reset(OpMIPS64AND) 8671 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8672 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8673 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8674 v2.AuxInt = 64 8675 v1.AddArg(v2) 8676 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8677 v3.AddArg(y) 8678 v1.AddArg(v3) 8679 v0.AddArg(v1) 8680 v.AddArg(v0) 8681 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8682 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8683 v5.AddArg(x) 8684 v4.AddArg(v5) 8685 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8686 v6.AddArg(y) 8687 v4.AddArg(v6) 8688 v.AddArg(v4) 8689 return true 8690 } 8691 } 8692 func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool { 8693 b := v.Block 8694 _ = b 8695 typ := &b.Func.Config.Types 8696 _ = typ 8697 // match: (Rsh8Ux32 <t> x y) 8698 // cond: 8699 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y))) 8700 for { 8701 t := 
v.Type 8702 _ = v.Args[1] 8703 x := v.Args[0] 8704 y := v.Args[1] 8705 v.reset(OpMIPS64AND) 8706 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8707 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8708 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8709 v2.AuxInt = 64 8710 v1.AddArg(v2) 8711 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8712 v3.AddArg(y) 8713 v1.AddArg(v3) 8714 v0.AddArg(v1) 8715 v.AddArg(v0) 8716 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8717 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8718 v5.AddArg(x) 8719 v4.AddArg(v5) 8720 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8721 v6.AddArg(y) 8722 v4.AddArg(v6) 8723 v.AddArg(v4) 8724 return true 8725 } 8726 } 8727 func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool { 8728 b := v.Block 8729 _ = b 8730 typ := &b.Func.Config.Types 8731 _ = typ 8732 // match: (Rsh8Ux64 <t> x y) 8733 // cond: 8734 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y)) 8735 for { 8736 t := v.Type 8737 _ = v.Args[1] 8738 x := v.Args[0] 8739 y := v.Args[1] 8740 v.reset(OpMIPS64AND) 8741 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8742 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8743 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8744 v2.AuxInt = 64 8745 v1.AddArg(v2) 8746 v1.AddArg(y) 8747 v0.AddArg(v1) 8748 v.AddArg(v0) 8749 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8750 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8751 v4.AddArg(x) 8752 v3.AddArg(v4) 8753 v3.AddArg(y) 8754 v.AddArg(v3) 8755 return true 8756 } 8757 } 8758 func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool { 8759 b := v.Block 8760 _ = b 8761 typ := &b.Func.Config.Types 8762 _ = typ 8763 // match: (Rsh8Ux8 <t> x y) 8764 // cond: 8765 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y))) 8766 for { 8767 t := v.Type 8768 _ = v.Args[1] 8769 x := v.Args[0] 8770 y := v.Args[1] 8771 v.reset(OpMIPS64AND) 
8772 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8773 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8774 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8775 v2.AuxInt = 64 8776 v1.AddArg(v2) 8777 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8778 v3.AddArg(y) 8779 v1.AddArg(v3) 8780 v0.AddArg(v1) 8781 v.AddArg(v0) 8782 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8783 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8784 v5.AddArg(x) 8785 v4.AddArg(v5) 8786 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8787 v6.AddArg(y) 8788 v4.AddArg(v6) 8789 v.AddArg(v4) 8790 return true 8791 } 8792 } 8793 func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool { 8794 b := v.Block 8795 _ = b 8796 typ := &b.Func.Config.Types 8797 _ = typ 8798 // match: (Rsh8x16 <t> x y) 8799 // cond: 8800 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8801 for { 8802 t := v.Type 8803 _ = v.Args[1] 8804 x := v.Args[0] 8805 y := v.Args[1] 8806 v.reset(OpMIPS64SRAV) 8807 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8808 v0.AddArg(x) 8809 v.AddArg(v0) 8810 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8811 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8812 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8813 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8814 v4.AddArg(y) 8815 v3.AddArg(v4) 8816 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8817 v5.AuxInt = 63 8818 v3.AddArg(v5) 8819 v2.AddArg(v3) 8820 v1.AddArg(v2) 8821 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8822 v6.AddArg(y) 8823 v1.AddArg(v6) 8824 v.AddArg(v1) 8825 return true 8826 } 8827 } 8828 func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool { 8829 b := v.Block 8830 _ = b 8831 typ := &b.Func.Config.Types 8832 _ = typ 8833 // match: (Rsh8x32 <t> x y) 8834 // cond: 8835 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8836 for { 8837 t := 
v.Type 8838 _ = v.Args[1] 8839 x := v.Args[0] 8840 y := v.Args[1] 8841 v.reset(OpMIPS64SRAV) 8842 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8843 v0.AddArg(x) 8844 v.AddArg(v0) 8845 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8846 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8847 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8848 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8849 v4.AddArg(y) 8850 v3.AddArg(v4) 8851 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8852 v5.AuxInt = 63 8853 v3.AddArg(v5) 8854 v2.AddArg(v3) 8855 v1.AddArg(v2) 8856 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8857 v6.AddArg(y) 8858 v1.AddArg(v6) 8859 v.AddArg(v1) 8860 return true 8861 } 8862 } 8863 func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool { 8864 b := v.Block 8865 _ = b 8866 typ := &b.Func.Config.Types 8867 _ = typ 8868 // match: (Rsh8x64 <t> x y) 8869 // cond: 8870 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8871 for { 8872 t := v.Type 8873 _ = v.Args[1] 8874 x := v.Args[0] 8875 y := v.Args[1] 8876 v.reset(OpMIPS64SRAV) 8877 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8878 v0.AddArg(x) 8879 v.AddArg(v0) 8880 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8881 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8882 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8883 v3.AddArg(y) 8884 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8885 v4.AuxInt = 63 8886 v3.AddArg(v4) 8887 v2.AddArg(v3) 8888 v1.AddArg(v2) 8889 v1.AddArg(y) 8890 v.AddArg(v1) 8891 return true 8892 } 8893 } 8894 func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool { 8895 b := v.Block 8896 _ = b 8897 typ := &b.Func.Config.Types 8898 _ = typ 8899 // match: (Rsh8x8 <t> x y) 8900 // cond: 8901 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8902 for { 8903 t := v.Type 8904 _ = v.Args[1] 8905 x := v.Args[0] 8906 y := v.Args[1] 8907 v.reset(OpMIPS64SRAV) 8908 v0 
:= b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8909 v0.AddArg(x) 8910 v.AddArg(v0) 8911 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8912 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8913 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8914 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8915 v4.AddArg(y) 8916 v3.AddArg(v4) 8917 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8918 v5.AuxInt = 63 8919 v3.AddArg(v5) 8920 v2.AddArg(v3) 8921 v1.AddArg(v2) 8922 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8923 v6.AddArg(y) 8924 v1.AddArg(v6) 8925 v.AddArg(v1) 8926 return true 8927 } 8928 } 8929 func rewriteValueMIPS64_OpSelect0_0(v *Value) bool { 8930 // match: (Select0 (DIVVU _ (MOVVconst [1]))) 8931 // cond: 8932 // result: (MOVVconst [0]) 8933 for { 8934 v_0 := v.Args[0] 8935 if v_0.Op != OpMIPS64DIVVU { 8936 break 8937 } 8938 _ = v_0.Args[1] 8939 v_0_1 := v_0.Args[1] 8940 if v_0_1.Op != OpMIPS64MOVVconst { 8941 break 8942 } 8943 if v_0_1.AuxInt != 1 { 8944 break 8945 } 8946 v.reset(OpMIPS64MOVVconst) 8947 v.AuxInt = 0 8948 return true 8949 } 8950 // match: (Select0 (DIVVU x (MOVVconst [c]))) 8951 // cond: isPowerOfTwo(c) 8952 // result: (ANDconst [c-1] x) 8953 for { 8954 v_0 := v.Args[0] 8955 if v_0.Op != OpMIPS64DIVVU { 8956 break 8957 } 8958 _ = v_0.Args[1] 8959 x := v_0.Args[0] 8960 v_0_1 := v_0.Args[1] 8961 if v_0_1.Op != OpMIPS64MOVVconst { 8962 break 8963 } 8964 c := v_0_1.AuxInt 8965 if !(isPowerOfTwo(c)) { 8966 break 8967 } 8968 v.reset(OpMIPS64ANDconst) 8969 v.AuxInt = c - 1 8970 v.AddArg(x) 8971 return true 8972 } 8973 // match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 8974 // cond: 8975 // result: (MOVVconst [c%d]) 8976 for { 8977 v_0 := v.Args[0] 8978 if v_0.Op != OpMIPS64DIVV { 8979 break 8980 } 8981 _ = v_0.Args[1] 8982 v_0_0 := v_0.Args[0] 8983 if v_0_0.Op != OpMIPS64MOVVconst { 8984 break 8985 } 8986 c := v_0_0.AuxInt 8987 v_0_1 := v_0.Args[1] 8988 if v_0_1.Op != OpMIPS64MOVVconst { 8989 break 8990 } 8991 d := v_0_1.AuxInt 
8992 v.reset(OpMIPS64MOVVconst) 8993 v.AuxInt = c % d 8994 return true 8995 } 8996 // match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 8997 // cond: 8998 // result: (MOVVconst [int64(uint64(c)%uint64(d))]) 8999 for { 9000 v_0 := v.Args[0] 9001 if v_0.Op != OpMIPS64DIVVU { 9002 break 9003 } 9004 _ = v_0.Args[1] 9005 v_0_0 := v_0.Args[0] 9006 if v_0_0.Op != OpMIPS64MOVVconst { 9007 break 9008 } 9009 c := v_0_0.AuxInt 9010 v_0_1 := v_0.Args[1] 9011 if v_0_1.Op != OpMIPS64MOVVconst { 9012 break 9013 } 9014 d := v_0_1.AuxInt 9015 v.reset(OpMIPS64MOVVconst) 9016 v.AuxInt = int64(uint64(c) % uint64(d)) 9017 return true 9018 } 9019 return false 9020 } 9021 func rewriteValueMIPS64_OpSelect1_0(v *Value) bool { 9022 // match: (Select1 (MULVU x (MOVVconst [-1]))) 9023 // cond: 9024 // result: (NEGV x) 9025 for { 9026 v_0 := v.Args[0] 9027 if v_0.Op != OpMIPS64MULVU { 9028 break 9029 } 9030 _ = v_0.Args[1] 9031 x := v_0.Args[0] 9032 v_0_1 := v_0.Args[1] 9033 if v_0_1.Op != OpMIPS64MOVVconst { 9034 break 9035 } 9036 if v_0_1.AuxInt != -1 { 9037 break 9038 } 9039 v.reset(OpMIPS64NEGV) 9040 v.AddArg(x) 9041 return true 9042 } 9043 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 9044 // cond: 9045 // result: (NEGV x) 9046 for { 9047 v_0 := v.Args[0] 9048 if v_0.Op != OpMIPS64MULVU { 9049 break 9050 } 9051 _ = v_0.Args[1] 9052 v_0_0 := v_0.Args[0] 9053 if v_0_0.Op != OpMIPS64MOVVconst { 9054 break 9055 } 9056 if v_0_0.AuxInt != -1 { 9057 break 9058 } 9059 x := v_0.Args[1] 9060 v.reset(OpMIPS64NEGV) 9061 v.AddArg(x) 9062 return true 9063 } 9064 // match: (Select1 (MULVU _ (MOVVconst [0]))) 9065 // cond: 9066 // result: (MOVVconst [0]) 9067 for { 9068 v_0 := v.Args[0] 9069 if v_0.Op != OpMIPS64MULVU { 9070 break 9071 } 9072 _ = v_0.Args[1] 9073 v_0_1 := v_0.Args[1] 9074 if v_0_1.Op != OpMIPS64MOVVconst { 9075 break 9076 } 9077 if v_0_1.AuxInt != 0 { 9078 break 9079 } 9080 v.reset(OpMIPS64MOVVconst) 9081 v.AuxInt = 0 9082 return true 9083 } 9084 // match: (Select1 (MULVU 
(MOVVconst [0]) _)) 9085 // cond: 9086 // result: (MOVVconst [0]) 9087 for { 9088 v_0 := v.Args[0] 9089 if v_0.Op != OpMIPS64MULVU { 9090 break 9091 } 9092 _ = v_0.Args[1] 9093 v_0_0 := v_0.Args[0] 9094 if v_0_0.Op != OpMIPS64MOVVconst { 9095 break 9096 } 9097 if v_0_0.AuxInt != 0 { 9098 break 9099 } 9100 v.reset(OpMIPS64MOVVconst) 9101 v.AuxInt = 0 9102 return true 9103 } 9104 // match: (Select1 (MULVU x (MOVVconst [1]))) 9105 // cond: 9106 // result: x 9107 for { 9108 v_0 := v.Args[0] 9109 if v_0.Op != OpMIPS64MULVU { 9110 break 9111 } 9112 _ = v_0.Args[1] 9113 x := v_0.Args[0] 9114 v_0_1 := v_0.Args[1] 9115 if v_0_1.Op != OpMIPS64MOVVconst { 9116 break 9117 } 9118 if v_0_1.AuxInt != 1 { 9119 break 9120 } 9121 v.reset(OpCopy) 9122 v.Type = x.Type 9123 v.AddArg(x) 9124 return true 9125 } 9126 // match: (Select1 (MULVU (MOVVconst [1]) x)) 9127 // cond: 9128 // result: x 9129 for { 9130 v_0 := v.Args[0] 9131 if v_0.Op != OpMIPS64MULVU { 9132 break 9133 } 9134 _ = v_0.Args[1] 9135 v_0_0 := v_0.Args[0] 9136 if v_0_0.Op != OpMIPS64MOVVconst { 9137 break 9138 } 9139 if v_0_0.AuxInt != 1 { 9140 break 9141 } 9142 x := v_0.Args[1] 9143 v.reset(OpCopy) 9144 v.Type = x.Type 9145 v.AddArg(x) 9146 return true 9147 } 9148 // match: (Select1 (MULVU x (MOVVconst [c]))) 9149 // cond: isPowerOfTwo(c) 9150 // result: (SLLVconst [log2(c)] x) 9151 for { 9152 v_0 := v.Args[0] 9153 if v_0.Op != OpMIPS64MULVU { 9154 break 9155 } 9156 _ = v_0.Args[1] 9157 x := v_0.Args[0] 9158 v_0_1 := v_0.Args[1] 9159 if v_0_1.Op != OpMIPS64MOVVconst { 9160 break 9161 } 9162 c := v_0_1.AuxInt 9163 if !(isPowerOfTwo(c)) { 9164 break 9165 } 9166 v.reset(OpMIPS64SLLVconst) 9167 v.AuxInt = log2(c) 9168 v.AddArg(x) 9169 return true 9170 } 9171 // match: (Select1 (MULVU (MOVVconst [c]) x)) 9172 // cond: isPowerOfTwo(c) 9173 // result: (SLLVconst [log2(c)] x) 9174 for { 9175 v_0 := v.Args[0] 9176 if v_0.Op != OpMIPS64MULVU { 9177 break 9178 } 9179 _ = v_0.Args[1] 9180 v_0_0 := v_0.Args[0] 9181 if v_0_0.Op != 
OpMIPS64MOVVconst { 9182 break 9183 } 9184 c := v_0_0.AuxInt 9185 x := v_0.Args[1] 9186 if !(isPowerOfTwo(c)) { 9187 break 9188 } 9189 v.reset(OpMIPS64SLLVconst) 9190 v.AuxInt = log2(c) 9191 v.AddArg(x) 9192 return true 9193 } 9194 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 9195 // cond: 9196 // result: (NEGV x) 9197 for { 9198 v_0 := v.Args[0] 9199 if v_0.Op != OpMIPS64MULVU { 9200 break 9201 } 9202 _ = v_0.Args[1] 9203 v_0_0 := v_0.Args[0] 9204 if v_0_0.Op != OpMIPS64MOVVconst { 9205 break 9206 } 9207 if v_0_0.AuxInt != -1 { 9208 break 9209 } 9210 x := v_0.Args[1] 9211 v.reset(OpMIPS64NEGV) 9212 v.AddArg(x) 9213 return true 9214 } 9215 // match: (Select1 (MULVU x (MOVVconst [-1]))) 9216 // cond: 9217 // result: (NEGV x) 9218 for { 9219 v_0 := v.Args[0] 9220 if v_0.Op != OpMIPS64MULVU { 9221 break 9222 } 9223 _ = v_0.Args[1] 9224 x := v_0.Args[0] 9225 v_0_1 := v_0.Args[1] 9226 if v_0_1.Op != OpMIPS64MOVVconst { 9227 break 9228 } 9229 if v_0_1.AuxInt != -1 { 9230 break 9231 } 9232 v.reset(OpMIPS64NEGV) 9233 v.AddArg(x) 9234 return true 9235 } 9236 return false 9237 } 9238 func rewriteValueMIPS64_OpSelect1_10(v *Value) bool { 9239 // match: (Select1 (MULVU (MOVVconst [0]) _)) 9240 // cond: 9241 // result: (MOVVconst [0]) 9242 for { 9243 v_0 := v.Args[0] 9244 if v_0.Op != OpMIPS64MULVU { 9245 break 9246 } 9247 _ = v_0.Args[1] 9248 v_0_0 := v_0.Args[0] 9249 if v_0_0.Op != OpMIPS64MOVVconst { 9250 break 9251 } 9252 if v_0_0.AuxInt != 0 { 9253 break 9254 } 9255 v.reset(OpMIPS64MOVVconst) 9256 v.AuxInt = 0 9257 return true 9258 } 9259 // match: (Select1 (MULVU _ (MOVVconst [0]))) 9260 // cond: 9261 // result: (MOVVconst [0]) 9262 for { 9263 v_0 := v.Args[0] 9264 if v_0.Op != OpMIPS64MULVU { 9265 break 9266 } 9267 _ = v_0.Args[1] 9268 v_0_1 := v_0.Args[1] 9269 if v_0_1.Op != OpMIPS64MOVVconst { 9270 break 9271 } 9272 if v_0_1.AuxInt != 0 { 9273 break 9274 } 9275 v.reset(OpMIPS64MOVVconst) 9276 v.AuxInt = 0 9277 return true 9278 } 9279 // match: (Select1 (MULVU 
(MOVVconst [1]) x)) 9280 // cond: 9281 // result: x 9282 for { 9283 v_0 := v.Args[0] 9284 if v_0.Op != OpMIPS64MULVU { 9285 break 9286 } 9287 _ = v_0.Args[1] 9288 v_0_0 := v_0.Args[0] 9289 if v_0_0.Op != OpMIPS64MOVVconst { 9290 break 9291 } 9292 if v_0_0.AuxInt != 1 { 9293 break 9294 } 9295 x := v_0.Args[1] 9296 v.reset(OpCopy) 9297 v.Type = x.Type 9298 v.AddArg(x) 9299 return true 9300 } 9301 // match: (Select1 (MULVU x (MOVVconst [1]))) 9302 // cond: 9303 // result: x 9304 for { 9305 v_0 := v.Args[0] 9306 if v_0.Op != OpMIPS64MULVU { 9307 break 9308 } 9309 _ = v_0.Args[1] 9310 x := v_0.Args[0] 9311 v_0_1 := v_0.Args[1] 9312 if v_0_1.Op != OpMIPS64MOVVconst { 9313 break 9314 } 9315 if v_0_1.AuxInt != 1 { 9316 break 9317 } 9318 v.reset(OpCopy) 9319 v.Type = x.Type 9320 v.AddArg(x) 9321 return true 9322 } 9323 // match: (Select1 (MULVU (MOVVconst [c]) x)) 9324 // cond: isPowerOfTwo(c) 9325 // result: (SLLVconst [log2(c)] x) 9326 for { 9327 v_0 := v.Args[0] 9328 if v_0.Op != OpMIPS64MULVU { 9329 break 9330 } 9331 _ = v_0.Args[1] 9332 v_0_0 := v_0.Args[0] 9333 if v_0_0.Op != OpMIPS64MOVVconst { 9334 break 9335 } 9336 c := v_0_0.AuxInt 9337 x := v_0.Args[1] 9338 if !(isPowerOfTwo(c)) { 9339 break 9340 } 9341 v.reset(OpMIPS64SLLVconst) 9342 v.AuxInt = log2(c) 9343 v.AddArg(x) 9344 return true 9345 } 9346 // match: (Select1 (MULVU x (MOVVconst [c]))) 9347 // cond: isPowerOfTwo(c) 9348 // result: (SLLVconst [log2(c)] x) 9349 for { 9350 v_0 := v.Args[0] 9351 if v_0.Op != OpMIPS64MULVU { 9352 break 9353 } 9354 _ = v_0.Args[1] 9355 x := v_0.Args[0] 9356 v_0_1 := v_0.Args[1] 9357 if v_0_1.Op != OpMIPS64MOVVconst { 9358 break 9359 } 9360 c := v_0_1.AuxInt 9361 if !(isPowerOfTwo(c)) { 9362 break 9363 } 9364 v.reset(OpMIPS64SLLVconst) 9365 v.AuxInt = log2(c) 9366 v.AddArg(x) 9367 return true 9368 } 9369 // match: (Select1 (DIVVU x (MOVVconst [1]))) 9370 // cond: 9371 // result: x 9372 for { 9373 v_0 := v.Args[0] 9374 if v_0.Op != OpMIPS64DIVVU { 9375 break 9376 } 9377 _ = 
v_0.Args[1]
	x := v_0.Args[0]
	v_0_1 := v_0.Args[1]
	if v_0_1.Op != OpMIPS64MOVVconst {
		break
	}
	if v_0_1.AuxInt != 1 {
		break
	}
	v.reset(OpCopy)
	v.Type = x.Type
	v.AddArg(x)
	return true
	}
	// match: (Select1 (DIVVU x (MOVVconst [c])))
	// cond: isPowerOfTwo(c)
	// result: (SRLVconst [log2(c)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = log2(c)
		v.AddArg(x)
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	// match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c])))
	// cond:
	// result: (MOVVconst [c*d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64MULVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_1.AuxInt
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c * d
		return true
	}
	return false
}

// rewriteValueMIPS64_OpSelect1_20 continues the Select1 rules: it constant-folds
// Select1 of DIVV/DIVVU (the quotient half of the divide pair, as the folds
// c/d below demonstrate) when both operands are MOVVconst.
//
// NOTE(review): `if !(d != 0)` guards below were added by review — this file is
// generated ("DO NOT EDIT"), and without the guard a divide whose divisor turns
// out to be a constant 0 would execute c/d inside the compiler and panic it.
// The real fix must be mirrored in gen/MIPS64.rules (add `&& d != 0` to the
// rule conditions) and the file regenerated; later upstream rules do exactly that.
func rewriteValueMIPS64_OpSelect1_20(v *Value) bool {
	// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [c/d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVV {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = c / d
		return true
	}
	// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
	// cond: d != 0
	// result: (MOVVconst [int64(uint64(c)/uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPS64DIVVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPS64MOVVconst {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPS64MOVVconst {
			break
		}
		d := v_0_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpMIPS64MOVVconst)
		v.AuxInt = int64(uint64(c) / uint64(d))
		return true
	}
	return false
}

// rewriteValueMIPS64_OpSignExt16to32_0 lowers the generic SignExt16to32 to MOVHreg.
func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool {
	// match: (SignExt16to32 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSignExt16to64_0 lowers the generic SignExt16to64 to MOVHreg.
func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool {
	// match: (SignExt16to64 x)
	// cond:
	// result: (MOVHreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSignExt32to64_0 lowers the generic SignExt32to64 to MOVWreg.
func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool {
	// match: (SignExt32to64 x)
	// cond:
	// result: (MOVWreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVWreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSignExt8to16_0 lowers the generic SignExt8to16 to MOVBreg.
func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool {
	// match: (SignExt8to16 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSignExt8to32_0 lowers the generic SignExt8to32 to MOVBreg.
func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool {
	// match: (SignExt8to32 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSignExt8to64_0 lowers the generic SignExt8to64 to MOVBreg.
func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool {
	// match: (SignExt8to64 x)
	// cond:
	// result: (MOVBreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVBreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpSlicemask_0 lowers Slicemask to an arithmetic right
// shift of the negated value by 63, yielding an all-ones or all-zeros mask.
func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAVconst (NEGV <t> x) [63])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 63
		v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS64_OpSqrt_0 lowers Sqrt to the double-precision SQRTD.
func rewriteValueMIPS64_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (SQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64SQRTD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpStaticCall_0 lowers StaticCall to CALLstatic,
// preserving the argument width (AuxInt) and target symbol (Aux).
func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPS64CALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpStore_0 selects the machine store instruction from
// the stored type's size (and, for 4/8 bytes, whether the value is a float).
func rewriteValueMIPS64_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpMIPS64MOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)
	// result: (MOVWstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVWstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)
	// result: (MOVVstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVVstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)
	// result: (MOVFstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVFstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)
	// result: (MOVDstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) {
			break
		}
		v.reset(OpMIPS64MOVDstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS64_OpSub16_0 lowers Sub16 to the 64-bit SUBV.
func rewriteValueMIPS64_OpSub16_0(v *Value) bool {
	// match: (Sub16 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSub32_0 lowers Sub32 to the 64-bit SUBV.
func rewriteValueMIPS64_OpSub32_0(v *Value) bool {
	// match: (Sub32 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSub32F_0 lowers Sub32F to single-precision SUBF.
func rewriteValueMIPS64_OpSub32F_0(v *Value) bool {
	// match: (Sub32F x y)
	// cond:
	// result: (SUBF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSub64_0 lowers Sub64 to SUBV.
func rewriteValueMIPS64_OpSub64_0(v *Value) bool {
	// match: (Sub64 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSub64F_0 lowers Sub64F to double-precision SUBD.
func rewriteValueMIPS64_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSub8_0 lowers Sub8 to the 64-bit SUBV.
func rewriteValueMIPS64_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpSubPtr_0 lowers pointer subtraction to SUBV.
func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUBV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SUBV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc16to8_0: truncations are no-ops on MIPS64 — the
// value is reused as-is via OpCopy (only the value's type changes).
func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc32to16_0: truncation is a no-op (OpCopy).
func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc32to8_0: truncation is a no-op (OpCopy).
func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc64to16_0: truncation is a no-op (OpCopy).
func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool {
	// match: (Trunc64to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc64to32_0: truncation is a no-op (OpCopy).
func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool {
	// match: (Trunc64to32 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpTrunc64to8_0: truncation is a no-op (OpCopy).
func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool {
	// match: (Trunc64to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpWB_0 lowers the write-barrier op to LoweredWB,
// carrying the barrier function symbol through Aux.
func rewriteValueMIPS64_OpWB_0(v *Value) bool {
	// match: (WB {fn} destptr srcptr mem)
	// cond:
	// result: (LoweredWB {fn} destptr srcptr mem)
	for {
		fn := v.Aux
		_ = v.Args[2]
		destptr := v.Args[0]
		srcptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPS64LoweredWB)
		v.Aux = fn
		v.AddArg(destptr)
		v.AddArg(srcptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS64_OpXor16_0 lowers Xor16 to the 64-bit XOR.
func rewriteValueMIPS64_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpXor32_0 lowers Xor32 to the 64-bit XOR.
func rewriteValueMIPS64_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpXor64_0 lowers Xor64 to XOR.
func rewriteValueMIPS64_OpXor64_0(v *Value) bool {
	// match: (Xor64 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpXor8_0 lowers Xor8 to the 64-bit XOR.
func rewriteValueMIPS64_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpZero_0 expands small Zero ops into explicit stores of a
// zero constant, choosing store width by size and declared alignment.
func rewriteValueMIPS64_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVVconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64MOVBstore)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
10012 // match: (Zero [2] {t} ptr mem) 10013 // cond: t.(*types.Type).Alignment()%2 == 0 10014 // result: (MOVHstore ptr (MOVVconst [0]) mem) 10015 for { 10016 if v.AuxInt != 2 { 10017 break 10018 } 10019 t := v.Aux 10020 _ = v.Args[1] 10021 ptr := v.Args[0] 10022 mem := v.Args[1] 10023 if !(t.(*types.Type).Alignment()%2 == 0) { 10024 break 10025 } 10026 v.reset(OpMIPS64MOVHstore) 10027 v.AddArg(ptr) 10028 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10029 v0.AuxInt = 0 10030 v.AddArg(v0) 10031 v.AddArg(mem) 10032 return true 10033 } 10034 // match: (Zero [2] ptr mem) 10035 // cond: 10036 // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)) 10037 for { 10038 if v.AuxInt != 2 { 10039 break 10040 } 10041 _ = v.Args[1] 10042 ptr := v.Args[0] 10043 mem := v.Args[1] 10044 v.reset(OpMIPS64MOVBstore) 10045 v.AuxInt = 1 10046 v.AddArg(ptr) 10047 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10048 v0.AuxInt = 0 10049 v.AddArg(v0) 10050 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10051 v1.AuxInt = 0 10052 v1.AddArg(ptr) 10053 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10054 v2.AuxInt = 0 10055 v1.AddArg(v2) 10056 v1.AddArg(mem) 10057 v.AddArg(v1) 10058 return true 10059 } 10060 // match: (Zero [4] {t} ptr mem) 10061 // cond: t.(*types.Type).Alignment()%4 == 0 10062 // result: (MOVWstore ptr (MOVVconst [0]) mem) 10063 for { 10064 if v.AuxInt != 4 { 10065 break 10066 } 10067 t := v.Aux 10068 _ = v.Args[1] 10069 ptr := v.Args[0] 10070 mem := v.Args[1] 10071 if !(t.(*types.Type).Alignment()%4 == 0) { 10072 break 10073 } 10074 v.reset(OpMIPS64MOVWstore) 10075 v.AddArg(ptr) 10076 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10077 v0.AuxInt = 0 10078 v.AddArg(v0) 10079 v.AddArg(mem) 10080 return true 10081 } 10082 // match: (Zero [4] {t} ptr mem) 10083 // cond: t.(*types.Type).Alignment()%2 == 0 10084 // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)) 
10085 for { 10086 if v.AuxInt != 4 { 10087 break 10088 } 10089 t := v.Aux 10090 _ = v.Args[1] 10091 ptr := v.Args[0] 10092 mem := v.Args[1] 10093 if !(t.(*types.Type).Alignment()%2 == 0) { 10094 break 10095 } 10096 v.reset(OpMIPS64MOVHstore) 10097 v.AuxInt = 2 10098 v.AddArg(ptr) 10099 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10100 v0.AuxInt = 0 10101 v.AddArg(v0) 10102 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10103 v1.AuxInt = 0 10104 v1.AddArg(ptr) 10105 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10106 v2.AuxInt = 0 10107 v1.AddArg(v2) 10108 v1.AddArg(mem) 10109 v.AddArg(v1) 10110 return true 10111 } 10112 // match: (Zero [4] ptr mem) 10113 // cond: 10114 // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))) 10115 for { 10116 if v.AuxInt != 4 { 10117 break 10118 } 10119 _ = v.Args[1] 10120 ptr := v.Args[0] 10121 mem := v.Args[1] 10122 v.reset(OpMIPS64MOVBstore) 10123 v.AuxInt = 3 10124 v.AddArg(ptr) 10125 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10126 v0.AuxInt = 0 10127 v.AddArg(v0) 10128 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10129 v1.AuxInt = 2 10130 v1.AddArg(ptr) 10131 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10132 v2.AuxInt = 0 10133 v1.AddArg(v2) 10134 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10135 v3.AuxInt = 1 10136 v3.AddArg(ptr) 10137 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10138 v4.AuxInt = 0 10139 v3.AddArg(v4) 10140 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10141 v5.AuxInt = 0 10142 v5.AddArg(ptr) 10143 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10144 v6.AuxInt = 0 10145 v5.AddArg(v6) 10146 v5.AddArg(mem) 10147 v3.AddArg(v5) 10148 v1.AddArg(v3) 10149 v.AddArg(v1) 10150 return true 10151 } 10152 // match: (Zero [8] {t} ptr mem) 10153 // cond: t.(*types.Type).Alignment()%8 == 0 10154 // 
result: (MOVVstore ptr (MOVVconst [0]) mem) 10155 for { 10156 if v.AuxInt != 8 { 10157 break 10158 } 10159 t := v.Aux 10160 _ = v.Args[1] 10161 ptr := v.Args[0] 10162 mem := v.Args[1] 10163 if !(t.(*types.Type).Alignment()%8 == 0) { 10164 break 10165 } 10166 v.reset(OpMIPS64MOVVstore) 10167 v.AddArg(ptr) 10168 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10169 v0.AuxInt = 0 10170 v.AddArg(v0) 10171 v.AddArg(mem) 10172 return true 10173 } 10174 // match: (Zero [8] {t} ptr mem) 10175 // cond: t.(*types.Type).Alignment()%4 == 0 10176 // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)) 10177 for { 10178 if v.AuxInt != 8 { 10179 break 10180 } 10181 t := v.Aux 10182 _ = v.Args[1] 10183 ptr := v.Args[0] 10184 mem := v.Args[1] 10185 if !(t.(*types.Type).Alignment()%4 == 0) { 10186 break 10187 } 10188 v.reset(OpMIPS64MOVWstore) 10189 v.AuxInt = 4 10190 v.AddArg(ptr) 10191 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10192 v0.AuxInt = 0 10193 v.AddArg(v0) 10194 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10195 v1.AuxInt = 0 10196 v1.AddArg(ptr) 10197 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10198 v2.AuxInt = 0 10199 v1.AddArg(v2) 10200 v1.AddArg(mem) 10201 v.AddArg(v1) 10202 return true 10203 } 10204 // match: (Zero [8] {t} ptr mem) 10205 // cond: t.(*types.Type).Alignment()%2 == 0 10206 // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))) 10207 for { 10208 if v.AuxInt != 8 { 10209 break 10210 } 10211 t := v.Aux 10212 _ = v.Args[1] 10213 ptr := v.Args[0] 10214 mem := v.Args[1] 10215 if !(t.(*types.Type).Alignment()%2 == 0) { 10216 break 10217 } 10218 v.reset(OpMIPS64MOVHstore) 10219 v.AuxInt = 6 10220 v.AddArg(ptr) 10221 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10222 v0.AuxInt = 0 10223 v.AddArg(v0) 10224 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10225 
v1.AuxInt = 4 10226 v1.AddArg(ptr) 10227 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10228 v2.AuxInt = 0 10229 v1.AddArg(v2) 10230 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10231 v3.AuxInt = 2 10232 v3.AddArg(ptr) 10233 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10234 v4.AuxInt = 0 10235 v3.AddArg(v4) 10236 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10237 v5.AuxInt = 0 10238 v5.AddArg(ptr) 10239 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10240 v6.AuxInt = 0 10241 v5.AddArg(v6) 10242 v5.AddArg(mem) 10243 v3.AddArg(v5) 10244 v1.AddArg(v3) 10245 v.AddArg(v1) 10246 return true 10247 } 10248 return false 10249 } 10250 func rewriteValueMIPS64_OpZero_10(v *Value) bool { 10251 b := v.Block 10252 _ = b 10253 config := b.Func.Config 10254 _ = config 10255 typ := &b.Func.Config.Types 10256 _ = typ 10257 // match: (Zero [3] ptr mem) 10258 // cond: 10259 // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))) 10260 for { 10261 if v.AuxInt != 3 { 10262 break 10263 } 10264 _ = v.Args[1] 10265 ptr := v.Args[0] 10266 mem := v.Args[1] 10267 v.reset(OpMIPS64MOVBstore) 10268 v.AuxInt = 2 10269 v.AddArg(ptr) 10270 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10271 v0.AuxInt = 0 10272 v.AddArg(v0) 10273 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10274 v1.AuxInt = 1 10275 v1.AddArg(ptr) 10276 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10277 v2.AuxInt = 0 10278 v1.AddArg(v2) 10279 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10280 v3.AuxInt = 0 10281 v3.AddArg(ptr) 10282 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10283 v4.AuxInt = 0 10284 v3.AddArg(v4) 10285 v3.AddArg(mem) 10286 v1.AddArg(v3) 10287 v.AddArg(v1) 10288 return true 10289 } 10290 // match: (Zero [6] {t} ptr mem) 10291 // cond: t.(*types.Type).Alignment()%2 == 0 10292 // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore 
[2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))) 10293 for { 10294 if v.AuxInt != 6 { 10295 break 10296 } 10297 t := v.Aux 10298 _ = v.Args[1] 10299 ptr := v.Args[0] 10300 mem := v.Args[1] 10301 if !(t.(*types.Type).Alignment()%2 == 0) { 10302 break 10303 } 10304 v.reset(OpMIPS64MOVHstore) 10305 v.AuxInt = 4 10306 v.AddArg(ptr) 10307 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10308 v0.AuxInt = 0 10309 v.AddArg(v0) 10310 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10311 v1.AuxInt = 2 10312 v1.AddArg(ptr) 10313 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10314 v2.AuxInt = 0 10315 v1.AddArg(v2) 10316 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10317 v3.AuxInt = 0 10318 v3.AddArg(ptr) 10319 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10320 v4.AuxInt = 0 10321 v3.AddArg(v4) 10322 v3.AddArg(mem) 10323 v1.AddArg(v3) 10324 v.AddArg(v1) 10325 return true 10326 } 10327 // match: (Zero [12] {t} ptr mem) 10328 // cond: t.(*types.Type).Alignment()%4 == 0 10329 // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))) 10330 for { 10331 if v.AuxInt != 12 { 10332 break 10333 } 10334 t := v.Aux 10335 _ = v.Args[1] 10336 ptr := v.Args[0] 10337 mem := v.Args[1] 10338 if !(t.(*types.Type).Alignment()%4 == 0) { 10339 break 10340 } 10341 v.reset(OpMIPS64MOVWstore) 10342 v.AuxInt = 8 10343 v.AddArg(ptr) 10344 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10345 v0.AuxInt = 0 10346 v.AddArg(v0) 10347 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10348 v1.AuxInt = 4 10349 v1.AddArg(ptr) 10350 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10351 v2.AuxInt = 0 10352 v1.AddArg(v2) 10353 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10354 v3.AuxInt = 0 10355 v3.AddArg(ptr) 10356 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10357 v4.AuxInt = 0 10358 v3.AddArg(v4) 10359 v3.AddArg(mem) 10360 
v1.AddArg(v3) 10361 v.AddArg(v1) 10362 return true 10363 } 10364 // match: (Zero [16] {t} ptr mem) 10365 // cond: t.(*types.Type).Alignment()%8 == 0 10366 // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)) 10367 for { 10368 if v.AuxInt != 16 { 10369 break 10370 } 10371 t := v.Aux 10372 _ = v.Args[1] 10373 ptr := v.Args[0] 10374 mem := v.Args[1] 10375 if !(t.(*types.Type).Alignment()%8 == 0) { 10376 break 10377 } 10378 v.reset(OpMIPS64MOVVstore) 10379 v.AuxInt = 8 10380 v.AddArg(ptr) 10381 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10382 v0.AuxInt = 0 10383 v.AddArg(v0) 10384 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10385 v1.AuxInt = 0 10386 v1.AddArg(ptr) 10387 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10388 v2.AuxInt = 0 10389 v1.AddArg(v2) 10390 v1.AddArg(mem) 10391 v.AddArg(v1) 10392 return true 10393 } 10394 // match: (Zero [24] {t} ptr mem) 10395 // cond: t.(*types.Type).Alignment()%8 == 0 10396 // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))) 10397 for { 10398 if v.AuxInt != 24 { 10399 break 10400 } 10401 t := v.Aux 10402 _ = v.Args[1] 10403 ptr := v.Args[0] 10404 mem := v.Args[1] 10405 if !(t.(*types.Type).Alignment()%8 == 0) { 10406 break 10407 } 10408 v.reset(OpMIPS64MOVVstore) 10409 v.AuxInt = 16 10410 v.AddArg(ptr) 10411 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10412 v0.AuxInt = 0 10413 v.AddArg(v0) 10414 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10415 v1.AuxInt = 8 10416 v1.AddArg(ptr) 10417 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10418 v2.AuxInt = 0 10419 v1.AddArg(v2) 10420 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10421 v3.AuxInt = 0 10422 v3.AddArg(ptr) 10423 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10424 v4.AuxInt = 0 10425 v3.AddArg(v4) 10426 v3.AddArg(mem) 10427 v1.AddArg(v3) 10428 v.AddArg(v1) 10429 return true 
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice
	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	// Medium, 8-aligned sizes use Duff's device; AuxInt is the byte offset of
	// the entry point into the 128-store device (8 bytes zeroed per store).
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) {
			break
		}
		v.reset(OpMIPS64DUFFZERO)
		v.AuxInt = 8 * (128 - s/8)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0
	// result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem)
	// Fallback runtime loop: AuxInt carries the element alignment, and the
	// second operand is ptr advanced to the final element (s - moveSize).
	for {
		s := v.AuxInt
		t := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) {
			break
		}
		v.reset(OpMIPS64LoweredZero)
		v.AuxInt = t.(*types.Type).Alignment()
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
		v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS64_OpZeroExt16to32_0 lowers ZeroExt16to32 to MOVHUreg.
func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpZeroExt16to64_0 lowers ZeroExt16to64 to MOVHUreg.
func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool {
	// match: (ZeroExt16to64 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPS64MOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS64_OpZeroExt32to64_0 lowers ZeroExt32to64 to MOVWUreg.
func rewriteValueMIPS64_OpZeroExt32to64_0(v
*Value) bool { 10496 // match: (ZeroExt32to64 x) 10497 // cond: 10498 // result: (MOVWUreg x) 10499 for { 10500 x := v.Args[0] 10501 v.reset(OpMIPS64MOVWUreg) 10502 v.AddArg(x) 10503 return true 10504 } 10505 } 10506 func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool { 10507 // match: (ZeroExt8to16 x) 10508 // cond: 10509 // result: (MOVBUreg x) 10510 for { 10511 x := v.Args[0] 10512 v.reset(OpMIPS64MOVBUreg) 10513 v.AddArg(x) 10514 return true 10515 } 10516 } 10517 func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool { 10518 // match: (ZeroExt8to32 x) 10519 // cond: 10520 // result: (MOVBUreg x) 10521 for { 10522 x := v.Args[0] 10523 v.reset(OpMIPS64MOVBUreg) 10524 v.AddArg(x) 10525 return true 10526 } 10527 } 10528 func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool { 10529 // match: (ZeroExt8to64 x) 10530 // cond: 10531 // result: (MOVBUreg x) 10532 for { 10533 x := v.Args[0] 10534 v.reset(OpMIPS64MOVBUreg) 10535 v.AddArg(x) 10536 return true 10537 } 10538 } 10539 func rewriteBlockMIPS64(b *Block) bool { 10540 config := b.Func.Config 10541 _ = config 10542 fe := b.Func.fe 10543 _ = fe 10544 typ := &config.Types 10545 _ = typ 10546 switch b.Kind { 10547 case BlockMIPS64EQ: 10548 // match: (EQ (FPFlagTrue cmp) yes no) 10549 // cond: 10550 // result: (FPF cmp yes no) 10551 for { 10552 v := b.Control 10553 if v.Op != OpMIPS64FPFlagTrue { 10554 break 10555 } 10556 cmp := v.Args[0] 10557 b.Kind = BlockMIPS64FPF 10558 b.SetControl(cmp) 10559 b.Aux = nil 10560 return true 10561 } 10562 // match: (EQ (FPFlagFalse cmp) yes no) 10563 // cond: 10564 // result: (FPT cmp yes no) 10565 for { 10566 v := b.Control 10567 if v.Op != OpMIPS64FPFlagFalse { 10568 break 10569 } 10570 cmp := v.Args[0] 10571 b.Kind = BlockMIPS64FPT 10572 b.SetControl(cmp) 10573 b.Aux = nil 10574 return true 10575 } 10576 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no) 10577 // cond: 10578 // result: (NE cmp yes no) 10579 for { 10580 v := b.Control 10581 if v.Op != OpMIPS64XORconst { 10582 
break 10583 } 10584 if v.AuxInt != 1 { 10585 break 10586 } 10587 cmp := v.Args[0] 10588 if cmp.Op != OpMIPS64SGT { 10589 break 10590 } 10591 _ = cmp.Args[1] 10592 b.Kind = BlockMIPS64NE 10593 b.SetControl(cmp) 10594 b.Aux = nil 10595 return true 10596 } 10597 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no) 10598 // cond: 10599 // result: (NE cmp yes no) 10600 for { 10601 v := b.Control 10602 if v.Op != OpMIPS64XORconst { 10603 break 10604 } 10605 if v.AuxInt != 1 { 10606 break 10607 } 10608 cmp := v.Args[0] 10609 if cmp.Op != OpMIPS64SGTU { 10610 break 10611 } 10612 _ = cmp.Args[1] 10613 b.Kind = BlockMIPS64NE 10614 b.SetControl(cmp) 10615 b.Aux = nil 10616 return true 10617 } 10618 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no) 10619 // cond: 10620 // result: (NE cmp yes no) 10621 for { 10622 v := b.Control 10623 if v.Op != OpMIPS64XORconst { 10624 break 10625 } 10626 if v.AuxInt != 1 { 10627 break 10628 } 10629 cmp := v.Args[0] 10630 if cmp.Op != OpMIPS64SGTconst { 10631 break 10632 } 10633 b.Kind = BlockMIPS64NE 10634 b.SetControl(cmp) 10635 b.Aux = nil 10636 return true 10637 } 10638 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no) 10639 // cond: 10640 // result: (NE cmp yes no) 10641 for { 10642 v := b.Control 10643 if v.Op != OpMIPS64XORconst { 10644 break 10645 } 10646 if v.AuxInt != 1 { 10647 break 10648 } 10649 cmp := v.Args[0] 10650 if cmp.Op != OpMIPS64SGTUconst { 10651 break 10652 } 10653 b.Kind = BlockMIPS64NE 10654 b.SetControl(cmp) 10655 b.Aux = nil 10656 return true 10657 } 10658 // match: (EQ (SGTUconst [1] x) yes no) 10659 // cond: 10660 // result: (NE x yes no) 10661 for { 10662 v := b.Control 10663 if v.Op != OpMIPS64SGTUconst { 10664 break 10665 } 10666 if v.AuxInt != 1 { 10667 break 10668 } 10669 x := v.Args[0] 10670 b.Kind = BlockMIPS64NE 10671 b.SetControl(x) 10672 b.Aux = nil 10673 return true 10674 } 10675 // match: (EQ (SGTU x (MOVVconst [0])) yes no) 10676 // cond: 10677 // result: (EQ x yes no) 10678 for { 10679 v := 
b.Control 10680 if v.Op != OpMIPS64SGTU { 10681 break 10682 } 10683 _ = v.Args[1] 10684 x := v.Args[0] 10685 v_1 := v.Args[1] 10686 if v_1.Op != OpMIPS64MOVVconst { 10687 break 10688 } 10689 if v_1.AuxInt != 0 { 10690 break 10691 } 10692 b.Kind = BlockMIPS64EQ 10693 b.SetControl(x) 10694 b.Aux = nil 10695 return true 10696 } 10697 // match: (EQ (SGTconst [0] x) yes no) 10698 // cond: 10699 // result: (GEZ x yes no) 10700 for { 10701 v := b.Control 10702 if v.Op != OpMIPS64SGTconst { 10703 break 10704 } 10705 if v.AuxInt != 0 { 10706 break 10707 } 10708 x := v.Args[0] 10709 b.Kind = BlockMIPS64GEZ 10710 b.SetControl(x) 10711 b.Aux = nil 10712 return true 10713 } 10714 // match: (EQ (SGT x (MOVVconst [0])) yes no) 10715 // cond: 10716 // result: (LEZ x yes no) 10717 for { 10718 v := b.Control 10719 if v.Op != OpMIPS64SGT { 10720 break 10721 } 10722 _ = v.Args[1] 10723 x := v.Args[0] 10724 v_1 := v.Args[1] 10725 if v_1.Op != OpMIPS64MOVVconst { 10726 break 10727 } 10728 if v_1.AuxInt != 0 { 10729 break 10730 } 10731 b.Kind = BlockMIPS64LEZ 10732 b.SetControl(x) 10733 b.Aux = nil 10734 return true 10735 } 10736 // match: (EQ (MOVVconst [0]) yes no) 10737 // cond: 10738 // result: (First nil yes no) 10739 for { 10740 v := b.Control 10741 if v.Op != OpMIPS64MOVVconst { 10742 break 10743 } 10744 if v.AuxInt != 0 { 10745 break 10746 } 10747 b.Kind = BlockFirst 10748 b.SetControl(nil) 10749 b.Aux = nil 10750 return true 10751 } 10752 // match: (EQ (MOVVconst [c]) yes no) 10753 // cond: c != 0 10754 // result: (First nil no yes) 10755 for { 10756 v := b.Control 10757 if v.Op != OpMIPS64MOVVconst { 10758 break 10759 } 10760 c := v.AuxInt 10761 if !(c != 0) { 10762 break 10763 } 10764 b.Kind = BlockFirst 10765 b.SetControl(nil) 10766 b.Aux = nil 10767 b.swapSuccessors() 10768 return true 10769 } 10770 case BlockMIPS64GEZ: 10771 // match: (GEZ (MOVVconst [c]) yes no) 10772 // cond: c >= 0 10773 // result: (First nil yes no) 10774 for { 10775 v := b.Control 10776 if v.Op != 
OpMIPS64MOVVconst { 10777 break 10778 } 10779 c := v.AuxInt 10780 if !(c >= 0) { 10781 break 10782 } 10783 b.Kind = BlockFirst 10784 b.SetControl(nil) 10785 b.Aux = nil 10786 return true 10787 } 10788 // match: (GEZ (MOVVconst [c]) yes no) 10789 // cond: c < 0 10790 // result: (First nil no yes) 10791 for { 10792 v := b.Control 10793 if v.Op != OpMIPS64MOVVconst { 10794 break 10795 } 10796 c := v.AuxInt 10797 if !(c < 0) { 10798 break 10799 } 10800 b.Kind = BlockFirst 10801 b.SetControl(nil) 10802 b.Aux = nil 10803 b.swapSuccessors() 10804 return true 10805 } 10806 case BlockMIPS64GTZ: 10807 // match: (GTZ (MOVVconst [c]) yes no) 10808 // cond: c > 0 10809 // result: (First nil yes no) 10810 for { 10811 v := b.Control 10812 if v.Op != OpMIPS64MOVVconst { 10813 break 10814 } 10815 c := v.AuxInt 10816 if !(c > 0) { 10817 break 10818 } 10819 b.Kind = BlockFirst 10820 b.SetControl(nil) 10821 b.Aux = nil 10822 return true 10823 } 10824 // match: (GTZ (MOVVconst [c]) yes no) 10825 // cond: c <= 0 10826 // result: (First nil no yes) 10827 for { 10828 v := b.Control 10829 if v.Op != OpMIPS64MOVVconst { 10830 break 10831 } 10832 c := v.AuxInt 10833 if !(c <= 0) { 10834 break 10835 } 10836 b.Kind = BlockFirst 10837 b.SetControl(nil) 10838 b.Aux = nil 10839 b.swapSuccessors() 10840 return true 10841 } 10842 case BlockIf: 10843 // match: (If cond yes no) 10844 // cond: 10845 // result: (NE cond yes no) 10846 for { 10847 v := b.Control 10848 _ = v 10849 cond := b.Control 10850 b.Kind = BlockMIPS64NE 10851 b.SetControl(cond) 10852 b.Aux = nil 10853 return true 10854 } 10855 case BlockMIPS64LEZ: 10856 // match: (LEZ (MOVVconst [c]) yes no) 10857 // cond: c <= 0 10858 // result: (First nil yes no) 10859 for { 10860 v := b.Control 10861 if v.Op != OpMIPS64MOVVconst { 10862 break 10863 } 10864 c := v.AuxInt 10865 if !(c <= 0) { 10866 break 10867 } 10868 b.Kind = BlockFirst 10869 b.SetControl(nil) 10870 b.Aux = nil 10871 return true 10872 } 10873 // match: (LEZ (MOVVconst [c]) yes 
no) 10874 // cond: c > 0 10875 // result: (First nil no yes) 10876 for { 10877 v := b.Control 10878 if v.Op != OpMIPS64MOVVconst { 10879 break 10880 } 10881 c := v.AuxInt 10882 if !(c > 0) { 10883 break 10884 } 10885 b.Kind = BlockFirst 10886 b.SetControl(nil) 10887 b.Aux = nil 10888 b.swapSuccessors() 10889 return true 10890 } 10891 case BlockMIPS64LTZ: 10892 // match: (LTZ (MOVVconst [c]) yes no) 10893 // cond: c < 0 10894 // result: (First nil yes no) 10895 for { 10896 v := b.Control 10897 if v.Op != OpMIPS64MOVVconst { 10898 break 10899 } 10900 c := v.AuxInt 10901 if !(c < 0) { 10902 break 10903 } 10904 b.Kind = BlockFirst 10905 b.SetControl(nil) 10906 b.Aux = nil 10907 return true 10908 } 10909 // match: (LTZ (MOVVconst [c]) yes no) 10910 // cond: c >= 0 10911 // result: (First nil no yes) 10912 for { 10913 v := b.Control 10914 if v.Op != OpMIPS64MOVVconst { 10915 break 10916 } 10917 c := v.AuxInt 10918 if !(c >= 0) { 10919 break 10920 } 10921 b.Kind = BlockFirst 10922 b.SetControl(nil) 10923 b.Aux = nil 10924 b.swapSuccessors() 10925 return true 10926 } 10927 case BlockMIPS64NE: 10928 // match: (NE (FPFlagTrue cmp) yes no) 10929 // cond: 10930 // result: (FPT cmp yes no) 10931 for { 10932 v := b.Control 10933 if v.Op != OpMIPS64FPFlagTrue { 10934 break 10935 } 10936 cmp := v.Args[0] 10937 b.Kind = BlockMIPS64FPT 10938 b.SetControl(cmp) 10939 b.Aux = nil 10940 return true 10941 } 10942 // match: (NE (FPFlagFalse cmp) yes no) 10943 // cond: 10944 // result: (FPF cmp yes no) 10945 for { 10946 v := b.Control 10947 if v.Op != OpMIPS64FPFlagFalse { 10948 break 10949 } 10950 cmp := v.Args[0] 10951 b.Kind = BlockMIPS64FPF 10952 b.SetControl(cmp) 10953 b.Aux = nil 10954 return true 10955 } 10956 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no) 10957 // cond: 10958 // result: (EQ cmp yes no) 10959 for { 10960 v := b.Control 10961 if v.Op != OpMIPS64XORconst { 10962 break 10963 } 10964 if v.AuxInt != 1 { 10965 break 10966 } 10967 cmp := v.Args[0] 10968 if cmp.Op != 
OpMIPS64SGT { 10969 break 10970 } 10971 _ = cmp.Args[1] 10972 b.Kind = BlockMIPS64EQ 10973 b.SetControl(cmp) 10974 b.Aux = nil 10975 return true 10976 } 10977 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no) 10978 // cond: 10979 // result: (EQ cmp yes no) 10980 for { 10981 v := b.Control 10982 if v.Op != OpMIPS64XORconst { 10983 break 10984 } 10985 if v.AuxInt != 1 { 10986 break 10987 } 10988 cmp := v.Args[0] 10989 if cmp.Op != OpMIPS64SGTU { 10990 break 10991 } 10992 _ = cmp.Args[1] 10993 b.Kind = BlockMIPS64EQ 10994 b.SetControl(cmp) 10995 b.Aux = nil 10996 return true 10997 } 10998 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no) 10999 // cond: 11000 // result: (EQ cmp yes no) 11001 for { 11002 v := b.Control 11003 if v.Op != OpMIPS64XORconst { 11004 break 11005 } 11006 if v.AuxInt != 1 { 11007 break 11008 } 11009 cmp := v.Args[0] 11010 if cmp.Op != OpMIPS64SGTconst { 11011 break 11012 } 11013 b.Kind = BlockMIPS64EQ 11014 b.SetControl(cmp) 11015 b.Aux = nil 11016 return true 11017 } 11018 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no) 11019 // cond: 11020 // result: (EQ cmp yes no) 11021 for { 11022 v := b.Control 11023 if v.Op != OpMIPS64XORconst { 11024 break 11025 } 11026 if v.AuxInt != 1 { 11027 break 11028 } 11029 cmp := v.Args[0] 11030 if cmp.Op != OpMIPS64SGTUconst { 11031 break 11032 } 11033 b.Kind = BlockMIPS64EQ 11034 b.SetControl(cmp) 11035 b.Aux = nil 11036 return true 11037 } 11038 // match: (NE (SGTUconst [1] x) yes no) 11039 // cond: 11040 // result: (EQ x yes no) 11041 for { 11042 v := b.Control 11043 if v.Op != OpMIPS64SGTUconst { 11044 break 11045 } 11046 if v.AuxInt != 1 { 11047 break 11048 } 11049 x := v.Args[0] 11050 b.Kind = BlockMIPS64EQ 11051 b.SetControl(x) 11052 b.Aux = nil 11053 return true 11054 } 11055 // match: (NE (SGTU x (MOVVconst [0])) yes no) 11056 // cond: 11057 // result: (NE x yes no) 11058 for { 11059 v := b.Control 11060 if v.Op != OpMIPS64SGTU { 11061 break 11062 } 11063 _ = v.Args[1] 11064 x := v.Args[0] 
11065 v_1 := v.Args[1] 11066 if v_1.Op != OpMIPS64MOVVconst { 11067 break 11068 } 11069 if v_1.AuxInt != 0 { 11070 break 11071 } 11072 b.Kind = BlockMIPS64NE 11073 b.SetControl(x) 11074 b.Aux = nil 11075 return true 11076 } 11077 // match: (NE (SGTconst [0] x) yes no) 11078 // cond: 11079 // result: (LTZ x yes no) 11080 for { 11081 v := b.Control 11082 if v.Op != OpMIPS64SGTconst { 11083 break 11084 } 11085 if v.AuxInt != 0 { 11086 break 11087 } 11088 x := v.Args[0] 11089 b.Kind = BlockMIPS64LTZ 11090 b.SetControl(x) 11091 b.Aux = nil 11092 return true 11093 } 11094 // match: (NE (SGT x (MOVVconst [0])) yes no) 11095 // cond: 11096 // result: (GTZ x yes no) 11097 for { 11098 v := b.Control 11099 if v.Op != OpMIPS64SGT { 11100 break 11101 } 11102 _ = v.Args[1] 11103 x := v.Args[0] 11104 v_1 := v.Args[1] 11105 if v_1.Op != OpMIPS64MOVVconst { 11106 break 11107 } 11108 if v_1.AuxInt != 0 { 11109 break 11110 } 11111 b.Kind = BlockMIPS64GTZ 11112 b.SetControl(x) 11113 b.Aux = nil 11114 return true 11115 } 11116 // match: (NE (MOVVconst [0]) yes no) 11117 // cond: 11118 // result: (First nil no yes) 11119 for { 11120 v := b.Control 11121 if v.Op != OpMIPS64MOVVconst { 11122 break 11123 } 11124 if v.AuxInt != 0 { 11125 break 11126 } 11127 b.Kind = BlockFirst 11128 b.SetControl(nil) 11129 b.Aux = nil 11130 b.swapSuccessors() 11131 return true 11132 } 11133 // match: (NE (MOVVconst [c]) yes no) 11134 // cond: c != 0 11135 // result: (First nil yes no) 11136 for { 11137 v := b.Control 11138 if v.Op != OpMIPS64MOVVconst { 11139 break 11140 } 11141 c := v.AuxInt 11142 if !(c != 0) { 11143 break 11144 } 11145 b.Kind = BlockFirst 11146 b.SetControl(nil) 11147 b.Aux = nil 11148 return true 11149 } 11150 } 11151 return false 11152 }