github.com/go-asm/go@v1.21.1-0.20240213172139-40c5ead50c48/cmd/compile/ssa/rewriteMIPS.go

// Code generated from _gen/MIPS.rules using 'go generate'; DO NOT EDIT.

package ssa

import "github.com/go-asm/go/cmd/compile/types"

func rewriteValueMIPS(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpMIPSABSD
		return true
	case OpAdd16:
		v.Op = OpMIPSADD
		return true
	case OpAdd32:
		v.Op = OpMIPSADD
		return true
	case OpAdd32F:
		v.Op = OpMIPSADDF
		return true
	case OpAdd32withcarry:
		return rewriteValueMIPS_OpAdd32withcarry(v)
	case OpAdd64F:
		v.Op = OpMIPSADDD
		return true
	case OpAdd8:
		v.Op = OpMIPSADD
		return true
	case OpAddPtr:
		v.Op = OpMIPSADD
		return true
	case OpAddr:
		return rewriteValueMIPS_OpAddr(v)
	case OpAnd16:
		v.Op = OpMIPSAND
		return true
	case OpAnd32:
		v.Op = OpMIPSAND
		return true
	case OpAnd8:
		v.Op = OpMIPSAND
		return true
	case OpAndB:
		v.Op = OpMIPSAND
		return true
	case OpAtomicAdd32:
		v.Op = OpMIPSLoweredAtomicAdd
		return true
	case OpAtomicAnd32:
		v.Op = OpMIPSLoweredAtomicAnd
		return true
	case OpAtomicAnd8:
		return rewriteValueMIPS_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		v.Op = OpMIPSLoweredAtomicCas
		return true
	case OpAtomicExchange32:
		v.Op = OpMIPSLoweredAtomicExchange
		return true
	case OpAtomicLoad32:
		v.Op = OpMIPSLoweredAtomicLoad32
		return true
	case OpAtomicLoad8:
		v.Op = OpMIPSLoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		v.Op = OpMIPSLoweredAtomicLoad32
		return true
	case OpAtomicOr32:
		v.Op = OpMIPSLoweredAtomicOr
		return true
	case OpAtomicOr8:
		return rewriteValueMIPS_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpMIPSLoweredAtomicStore32
		return true
	case OpAtomicStore8:
		v.Op = OpMIPSLoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpMIPSLoweredAtomicStore32
		return true
	case OpAvg32u:
		return rewriteValueMIPS_OpAvg32u(v)
	case OpBitLen32:
		return rewriteValueMIPS_OpBitLen32(v)
	case OpClosureCall:
		v.Op = OpMIPSCALLclosure
		return true
	case OpCom16:
		return rewriteValueMIPS_OpCom16(v)
	case OpCom32:
		return rewriteValueMIPS_OpCom32(v)
	case OpCom8:
		return rewriteValueMIPS_OpCom8(v)
	case OpConst16:
		return rewriteValueMIPS_OpConst16(v)
	case OpConst32:
		return rewriteValueMIPS_OpConst32(v)
	case OpConst32F:
		v.Op = OpMIPSMOVFconst
		return true
	case OpConst64F:
		v.Op = OpMIPSMOVDconst
		return true
	case OpConst8:
		return rewriteValueMIPS_OpConst8(v)
	case OpConstBool:
		return rewriteValueMIPS_OpConstBool(v)
	case OpConstNil:
		return rewriteValueMIPS_OpConstNil(v)
	case OpCtz32:
		return rewriteValueMIPS_OpCtz32(v)
	case OpCtz32NonZero:
		v.Op = OpCtz32
		return true
	case OpCvt32Fto32:
		v.Op = OpMIPSTRUNCFW
		return true
	case OpCvt32Fto64F:
		v.Op = OpMIPSMOVFD
		return true
	case OpCvt32to32F:
		v.Op = OpMIPSMOVWF
		return true
	case OpCvt32to64F:
		v.Op = OpMIPSMOVWD
		return true
	case OpCvt64Fto32:
		v.Op = OpMIPSTRUNCDW
		return true
	case OpCvt64Fto32F:
		v.Op = OpMIPSMOVDF
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueMIPS_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueMIPS_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueMIPS_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpMIPSDIVF
		return true
	case OpDiv32u:
		return rewriteValueMIPS_OpDiv32u(v)
	case OpDiv64F:
		v.Op = OpMIPSDIVD
		return true
	case OpDiv8:
		return rewriteValueMIPS_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueMIPS_OpDiv8u(v)
	case OpEq16:
		return rewriteValueMIPS_OpEq16(v)
	case OpEq32:
		return rewriteValueMIPS_OpEq32(v)
	case OpEq32F:
		return rewriteValueMIPS_OpEq32F(v)
	case OpEq64F:
		return rewriteValueMIPS_OpEq64F(v)
	case OpEq8:
		return rewriteValueMIPS_OpEq8(v)
	case OpEqB:
		return rewriteValueMIPS_OpEqB(v)
	case OpEqPtr:
		return rewriteValueMIPS_OpEqPtr(v)
	case OpGetCallerPC:
		v.Op = OpMIPSLoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpMIPSLoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpMIPSLoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueMIPS_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueMIPS_OpHmul32u(v)
	case OpInterCall:
		v.Op = OpMIPSCALLinter
		return true
	case OpIsInBounds:
		return rewriteValueMIPS_OpIsInBounds(v)
	case OpIsNonNil:
		return rewriteValueMIPS_OpIsNonNil(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS_OpIsSliceInBounds(v)
	case OpLeq16:
		return rewriteValueMIPS_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueMIPS_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueMIPS_OpLeq32(v)
	case OpLeq32F:
		return rewriteValueMIPS_OpLeq32F(v)
	case OpLeq32U:
		return rewriteValueMIPS_OpLeq32U(v)
	case OpLeq64F:
		return rewriteValueMIPS_OpLeq64F(v)
	case OpLeq8:
		return rewriteValueMIPS_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueMIPS_OpLeq8U(v)
	case OpLess16:
		return rewriteValueMIPS_OpLess16(v)
	case OpLess16U:
		return rewriteValueMIPS_OpLess16U(v)
	case OpLess32:
		return rewriteValueMIPS_OpLess32(v)
	case OpLess32F:
		return rewriteValueMIPS_OpLess32F(v)
	case OpLess32U:
		return rewriteValueMIPS_OpLess32U(v)
	case OpLess64F:
		return rewriteValueMIPS_OpLess64F(v)
	case OpLess8:
		return rewriteValueMIPS_OpLess8(v)
	case OpLess8U:
		return rewriteValueMIPS_OpLess8U(v)
	case OpLoad:
		return rewriteValueMIPS_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueMIPS_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueMIPS_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueMIPS_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueMIPS_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueMIPS_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueMIPS_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueMIPS_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueMIPS_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueMIPS_OpLsh32x8(v)
	case OpLsh8x16:
		return rewriteValueMIPS_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueMIPS_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueMIPS_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueMIPS_OpLsh8x8(v)
	case OpMIPSADD:
		return rewriteValueMIPS_OpMIPSADD(v)
	case OpMIPSADDconst:
		return rewriteValueMIPS_OpMIPSADDconst(v)
	case OpMIPSAND:
		return rewriteValueMIPS_OpMIPSAND(v)
	case OpMIPSANDconst:
		return rewriteValueMIPS_OpMIPSANDconst(v)
	case OpMIPSCMOVZ:
		return rewriteValueMIPS_OpMIPSCMOVZ(v)
	case OpMIPSCMOVZzero:
		return rewriteValueMIPS_OpMIPSCMOVZzero(v)
	case OpMIPSLoweredAtomicAdd:
		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v)
	case OpMIPSLoweredAtomicStore32:
		return rewriteValueMIPS_OpMIPSLoweredAtomicStore32(v)
	case OpMIPSMOVBUload:
		return rewriteValueMIPS_OpMIPSMOVBUload(v)
	case OpMIPSMOVBUreg:
		return rewriteValueMIPS_OpMIPSMOVBUreg(v)
	case OpMIPSMOVBload:
		return rewriteValueMIPS_OpMIPSMOVBload(v)
	case OpMIPSMOVBreg:
		return rewriteValueMIPS_OpMIPSMOVBreg(v)
	case OpMIPSMOVBstore:
		return rewriteValueMIPS_OpMIPSMOVBstore(v)
	case OpMIPSMOVBstorezero:
		return rewriteValueMIPS_OpMIPSMOVBstorezero(v)
	case OpMIPSMOVDload:
		return rewriteValueMIPS_OpMIPSMOVDload(v)
	case OpMIPSMOVDstore:
		return rewriteValueMIPS_OpMIPSMOVDstore(v)
	case OpMIPSMOVFload:
		return rewriteValueMIPS_OpMIPSMOVFload(v)
	case OpMIPSMOVFstore:
		return rewriteValueMIPS_OpMIPSMOVFstore(v)
	case OpMIPSMOVHUload:
		return rewriteValueMIPS_OpMIPSMOVHUload(v)
	case OpMIPSMOVHUreg:
		return rewriteValueMIPS_OpMIPSMOVHUreg(v)
	case OpMIPSMOVHload:
		return rewriteValueMIPS_OpMIPSMOVHload(v)
	case OpMIPSMOVHreg:
		return rewriteValueMIPS_OpMIPSMOVHreg(v)
	case OpMIPSMOVHstore:
		return rewriteValueMIPS_OpMIPSMOVHstore(v)
	case OpMIPSMOVHstorezero:
		return rewriteValueMIPS_OpMIPSMOVHstorezero(v)
	case OpMIPSMOVWload:
		return rewriteValueMIPS_OpMIPSMOVWload(v)
	case OpMIPSMOVWnop:
		return rewriteValueMIPS_OpMIPSMOVWnop(v)
	case OpMIPSMOVWreg:
		return rewriteValueMIPS_OpMIPSMOVWreg(v)
	case OpMIPSMOVWstore:
		return rewriteValueMIPS_OpMIPSMOVWstore(v)
	case OpMIPSMOVWstorezero:
		return rewriteValueMIPS_OpMIPSMOVWstorezero(v)
	case OpMIPSMUL:
		return rewriteValueMIPS_OpMIPSMUL(v)
	case OpMIPSNEG:
		return rewriteValueMIPS_OpMIPSNEG(v)
	case OpMIPSNOR:
		return rewriteValueMIPS_OpMIPSNOR(v)
	case OpMIPSNORconst:
		return rewriteValueMIPS_OpMIPSNORconst(v)
	case OpMIPSOR:
		return rewriteValueMIPS_OpMIPSOR(v)
	case OpMIPSORconst:
		return rewriteValueMIPS_OpMIPSORconst(v)
	case OpMIPSSGT:
		return rewriteValueMIPS_OpMIPSSGT(v)
	case OpMIPSSGTU:
		return rewriteValueMIPS_OpMIPSSGTU(v)
	case OpMIPSSGTUconst:
		return rewriteValueMIPS_OpMIPSSGTUconst(v)
	case OpMIPSSGTUzero:
		return rewriteValueMIPS_OpMIPSSGTUzero(v)
	case OpMIPSSGTconst:
		return rewriteValueMIPS_OpMIPSSGTconst(v)
	case OpMIPSSGTzero:
		return rewriteValueMIPS_OpMIPSSGTzero(v)
	case OpMIPSSLL:
		return rewriteValueMIPS_OpMIPSSLL(v)
	case OpMIPSSLLconst:
		return rewriteValueMIPS_OpMIPSSLLconst(v)
	case OpMIPSSRA:
		return rewriteValueMIPS_OpMIPSSRA(v)
	case OpMIPSSRAconst:
		return rewriteValueMIPS_OpMIPSSRAconst(v)
	case OpMIPSSRL:
		return rewriteValueMIPS_OpMIPSSRL(v)
	case OpMIPSSRLconst:
		return rewriteValueMIPS_OpMIPSSRLconst(v)
	case OpMIPSSUB:
		return rewriteValueMIPS_OpMIPSSUB(v)
	case OpMIPSSUBconst:
		return rewriteValueMIPS_OpMIPSSUBconst(v)
	case OpMIPSXOR:
		return rewriteValueMIPS_OpMIPSXOR(v)
	case OpMIPSXORconst:
		return rewriteValueMIPS_OpMIPSXORconst(v)
	case OpMod16:
		return rewriteValueMIPS_OpMod16(v)
	case OpMod16u:
		return rewriteValueMIPS_OpMod16u(v)
	case OpMod32:
		return rewriteValueMIPS_OpMod32(v)
	case OpMod32u:
		return rewriteValueMIPS_OpMod32u(v)
	case OpMod8:
		return rewriteValueMIPS_OpMod8(v)
	case OpMod8u:
		return rewriteValueMIPS_OpMod8u(v)
	case OpMove:
		return rewriteValueMIPS_OpMove(v)
	case OpMul16:
		v.Op = OpMIPSMUL
		return true
	case OpMul32:
		v.Op = OpMIPSMUL
		return true
	case OpMul32F:
		v.Op = OpMIPSMULF
		return true
	case OpMul32uhilo:
		v.Op = OpMIPSMULTU
		return true
	case OpMul64F:
		v.Op = OpMIPSMULD
		return true
	case OpMul8:
		v.Op = OpMIPSMUL
		return true
	case OpNeg16:
		v.Op = OpMIPSNEG
		return true
	case OpNeg32:
		v.Op = OpMIPSNEG
		return true
	case OpNeg32F:
		v.Op = OpMIPSNEGF
		return true
	case OpNeg64F:
		v.Op = OpMIPSNEGD
		return true
	case OpNeg8:
		v.Op = OpMIPSNEG
		return true
	case OpNeq16:
		return rewriteValueMIPS_OpNeq16(v)
	case OpNeq32:
		return rewriteValueMIPS_OpNeq32(v)
	case OpNeq32F:
		return rewriteValueMIPS_OpNeq32F(v)
	case OpNeq64F:
		return rewriteValueMIPS_OpNeq64F(v)
	case OpNeq8:
		return rewriteValueMIPS_OpNeq8(v)
	case OpNeqB:
		v.Op = OpMIPSXOR
		return true
	case OpNeqPtr:
		return rewriteValueMIPS_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpMIPSLoweredNilCheck
		return true
	case OpNot:
		return rewriteValueMIPS_OpNot(v)
	case OpOffPtr:
		return rewriteValueMIPS_OpOffPtr(v)
	case OpOr16:
		v.Op = OpMIPSOR
		return true
	case OpOr32:
		v.Op = OpMIPSOR
		return true
	case OpOr8:
		v.Op = OpMIPSOR
		return true
	case OpOrB:
		v.Op = OpMIPSOR
		return true
	case OpPanicBounds:
		return rewriteValueMIPS_OpPanicBounds(v)
	case OpPanicExtend:
		return rewriteValueMIPS_OpPanicExtend(v)
	case OpRotateLeft16:
		return rewriteValueMIPS_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueMIPS_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueMIPS_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueMIPS_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpCopy
		return true
	case OpRound64F:
		v.Op = OpCopy
		return true
	case OpRsh16Ux16:
		return rewriteValueMIPS_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueMIPS_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueMIPS_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueMIPS_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueMIPS_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueMIPS_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueMIPS_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueMIPS_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueMIPS_OpRsh32x8(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueMIPS_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueMIPS_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueMIPS_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueMIPS_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueMIPS_OpSelect0(v)
	case OpSelect1:
		return rewriteValueMIPS_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpMIPSMOVHreg
		return true
	case OpSignExt8to16:
		v.Op = OpMIPSMOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpMIPSMOVBreg
		return true
	case OpSignmask:
		return rewriteValueMIPS_OpSignmask(v)
	case OpSlicemask:
		return rewriteValueMIPS_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpMIPSSQRTD
		return true
	case OpSqrt32:
		v.Op = OpMIPSSQRTF
		return true
	case OpStaticCall:
		v.Op = OpMIPSCALLstatic
		return true
	case OpStore:
		return rewriteValueMIPS_OpStore(v)
	case OpSub16:
		v.Op = OpMIPSSUB
		return true
	case OpSub32:
		v.Op = OpMIPSSUB
		return true
	case OpSub32F:
		v.Op = OpMIPSSUBF
		return true
	case OpSub32withcarry:
		return rewriteValueMIPS_OpSub32withcarry(v)
	case OpSub64F:
		v.Op = OpMIPSSUBD
		return true
	case OpSub8:
		v.Op = OpMIPSSUB
		return true
	case OpSubPtr:
		v.Op = OpMIPSSUB
		return true
	case OpTailCall:
		v.Op = OpMIPSCALLtail
		return true
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpMIPSLoweredWB
		return true
	case OpXor16:
		v.Op = OpMIPSXOR
		return true
	case OpXor32:
		v.Op = OpMIPSXOR
		return true
	case OpXor8:
		v.Op = OpMIPSXOR
		return true
	case OpZero:
		return rewriteValueMIPS_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpMIPSMOVHUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpMIPSMOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpMIPSMOVBUreg
		return true
	case OpZeromask:
		return rewriteValueMIPS_OpZeromask(v)
	}
	return false
}
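// rewriteValueMIPS above is the per-value entry point for the lowering rules in
// this generated file. Cases with no extra matching conditions just swap v.Op to
// the MIPS-specific op in place; everything else delegates to a
// rewriteValueMIPS_Op* helper below, which reports whether it rewrote v.
// As a rough, hypothetical sketch of how such a rule table gets driven (the real
// rewrite loop in this package differs in its bookkeeping), the rules are applied
// until they reach a fixed point:
//
//	for _, b := range f.Blocks {
//		for _, v := range b.Values {
//			for rewriteValueMIPS(v) {
//				// keep applying rules to v until none fire
//			}
//		}
//	}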
func rewriteValueMIPS_OpAdd32withcarry(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Add32withcarry <t> x y c)
	// result: (ADD c (ADD <t> x y))
	for {
		t := v.Type
		x := v_0
		y := v_1
		c := v_2
		v.reset(OpMIPSADD)
		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
		v0.AddArg2(x, y)
		v.AddArg2(c, v0)
		return true
	}
}
func rewriteValueMIPS_OpAddr(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Addr {sym} base)
	// result: (MOVWaddr {sym} base)
	for {
		sym := auxToSym(v.Aux)
		base := v_0
		v.reset(OpMIPSMOVWaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
}
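// The AtomicAnd8 and AtomicOr8 lowerings below widen a byte-sized atomic RMW to
// an atomic on the containing aligned 32-bit word, since the machine ops they
// target (LoweredAtomicAnd, LoweredAtomicOr) work on whole words.
// (AND (MOVWconst [^3]) ptr) rounds the pointer down to its aligned word,
// (ANDconst [3] ptr) is the byte index within that word, and (SLLconst [3] ...)
// converts the index into a bit shift (index*8). On big-endian configs the byte
// index is mirrored first with (XORconst [3] ptr). For AtomicAnd8 the bits
// outside the target byte must stay set, so the shifted 0xff mask is
// complemented via (NORconst [0] ...) and ORed onto the shifted value.
// As a small worked example (assuming a little-endian target and a pointer whose
// low two bits are 10, i.e. byte index 2): the shift is 2*8 = 16, and the word
// ANDed in is (val<<16) | ^(0xff<<16), which only affects byte 2 of the word.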
func rewriteValueMIPS_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v5.AuxInt = int32ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v6.AuxInt = int32ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
		v7.AuxInt = int32ToAuxInt(0)
		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v9.AuxInt = int32ToAuxInt(0xff)
		v8.AddArg2(v9, v5)
		v7.AddArg(v8)
		v2.AddArg2(v3, v7)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicAnd8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v5.AuxInt = int32ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v6.AuxInt = int32ToAuxInt(3)
		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
		v7.AuxInt = int32ToAuxInt(3)
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v5.AddArg(v6)
		v3.AddArg2(v4, v5)
		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
		v8.AuxInt = int32ToAuxInt(0)
		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v10.AuxInt = int32ToAuxInt(0xff)
		v9.AddArg2(v10, v5)
		v8.AddArg(v9)
		v2.AddArg2(v3, v8)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicOr)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v4.AuxInt = int32ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v5.AuxInt = int32ToAuxInt(3)
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	// match: (AtomicOr8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicOr)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(^3)
		v0.AddArg2(v1, ptr)
		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v4.AuxInt = int32ToAuxInt(3)
		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v5.AuxInt = int32ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
		v6.AuxInt = int32ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v4.AddArg(v5)
		v2.AddArg2(v3, v4)
		v.AddArg3(v0, v2, mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpAvg32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg32u <t> x y)
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpMIPSADD)
		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
		v0.AuxInt = int32ToAuxInt(1)
		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
		v1.AddArg2(x, y)
		v0.AddArg(v1)
		v.AddArg2(v0, y)
		return true
	}
}
func rewriteValueMIPS_OpBitLen32(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (BitLen32 <t> x)
	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(32)
		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
		v1.AddArg(x)
		v.AddArg2(v0, v1)
		return true
	}
}
func rewriteValueMIPS_OpCom16(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com16 x)
	// result: (NORconst [0] x)
	for {
		x := v_0
		v.reset(OpMIPSNORconst)
		v.AuxInt = int32ToAuxInt(0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCom32(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com32 x)
	// result: (NORconst [0] x)
	for {
		x := v_0
		v.reset(OpMIPSNORconst)
		v.AuxInt = int32ToAuxInt(0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCom8(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Com8 x)
	// result: (NORconst [0] x)
	for {
		x := v_0
		v.reset(OpMIPSNORconst)
		v.AuxInt = int32ToAuxInt(0)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpConst16(v *Value) bool {
	// match: (Const16 [val])
	// result: (MOVWconst [int32(val)])
	for {
		val := auxIntToInt16(v.AuxInt)
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int32ToAuxInt(int32(val))
		return true
	}
}
func rewriteValueMIPS_OpConst32(v *Value) bool {
	// match: (Const32 [val])
	// result: (MOVWconst [int32(val)])
	for {
		val := auxIntToInt32(v.AuxInt)
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int32ToAuxInt(int32(val))
		return true
	}
}
func rewriteValueMIPS_OpConst8(v *Value) bool {
	// match: (Const8 [val])
	// result: (MOVWconst [int32(val)])
	for {
		val := auxIntToInt8(v.AuxInt)
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int32ToAuxInt(int32(val))
		return true
	}
}
func rewriteValueMIPS_OpConstBool(v *Value) bool {
	// match: (ConstBool [t])
	// result: (MOVWconst
[b2i32(t)]) 882 for { 883 t := auxIntToBool(v.AuxInt) 884 v.reset(OpMIPSMOVWconst) 885 v.AuxInt = int32ToAuxInt(b2i32(t)) 886 return true 887 } 888 } 889 func rewriteValueMIPS_OpConstNil(v *Value) bool { 890 // match: (ConstNil) 891 // result: (MOVWconst [0]) 892 for { 893 v.reset(OpMIPSMOVWconst) 894 v.AuxInt = int32ToAuxInt(0) 895 return true 896 } 897 } 898 func rewriteValueMIPS_OpCtz32(v *Value) bool { 899 v_0 := v.Args[0] 900 b := v.Block 901 typ := &b.Func.Config.Types 902 // match: (Ctz32 <t> x) 903 // result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x))))) 904 for { 905 t := v.Type 906 x := v_0 907 v.reset(OpMIPSSUB) 908 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 909 v0.AuxInt = int32ToAuxInt(32) 910 v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t) 911 v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t) 912 v2.AuxInt = int32ToAuxInt(1) 913 v3 := b.NewValue0(v.Pos, OpMIPSAND, t) 914 v4 := b.NewValue0(v.Pos, OpMIPSNEG, t) 915 v4.AddArg(x) 916 v3.AddArg2(x, v4) 917 v2.AddArg(v3) 918 v1.AddArg(v2) 919 v.AddArg2(v0, v1) 920 return true 921 } 922 } 923 func rewriteValueMIPS_OpDiv16(v *Value) bool { 924 v_1 := v.Args[1] 925 v_0 := v.Args[0] 926 b := v.Block 927 typ := &b.Func.Config.Types 928 // match: (Div16 x y) 929 // result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y))) 930 for { 931 x := v_0 932 y := v_1 933 v.reset(OpSelect1) 934 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 935 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 936 v1.AddArg(x) 937 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 938 v2.AddArg(y) 939 v0.AddArg2(v1, v2) 940 v.AddArg(v0) 941 return true 942 } 943 } 944 func rewriteValueMIPS_OpDiv16u(v *Value) bool { 945 v_1 := v.Args[1] 946 v_0 := v.Args[0] 947 b := v.Block 948 typ := &b.Func.Config.Types 949 // match: (Div16u x y) 950 // result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y))) 951 for { 952 x := v_0 953 y := v_1 954 v.reset(OpSelect1) 955 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 956 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 957 v1.AddArg(x) 958 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 959 v2.AddArg(y) 960 v0.AddArg2(v1, v2) 961 v.AddArg(v0) 962 return true 963 } 964 } 965 func rewriteValueMIPS_OpDiv32(v *Value) bool { 966 v_1 := v.Args[1] 967 v_0 := v.Args[0] 968 b := v.Block 969 typ := &b.Func.Config.Types 970 // match: (Div32 x y) 971 // result: (Select1 (DIV x y)) 972 for { 973 x := v_0 974 y := v_1 975 v.reset(OpSelect1) 976 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 977 v0.AddArg2(x, y) 978 v.AddArg(v0) 979 return true 980 } 981 } 982 func rewriteValueMIPS_OpDiv32u(v *Value) bool { 983 v_1 := v.Args[1] 984 v_0 := v.Args[0] 985 b := v.Block 986 typ := &b.Func.Config.Types 987 // match: (Div32u x y) 988 // result: (Select1 (DIVU x y)) 989 for { 990 x := v_0 991 y := v_1 992 v.reset(OpSelect1) 993 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 994 v0.AddArg2(x, y) 995 v.AddArg(v0) 996 return true 997 } 998 } 999 func rewriteValueMIPS_OpDiv8(v *Value) bool { 1000 v_1 := v.Args[1] 1001 v_0 := v.Args[0] 1002 b := v.Block 1003 typ := &b.Func.Config.Types 1004 // match: (Div8 x y) 1005 // result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y))) 1006 for { 1007 x := v_0 1008 y := v_1 1009 v.reset(OpSelect1) 1010 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 1011 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1012 v1.AddArg(x) 1013 v2 := 
b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1014 v2.AddArg(y) 1015 v0.AddArg2(v1, v2) 1016 v.AddArg(v0) 1017 return true 1018 } 1019 } 1020 func rewriteValueMIPS_OpDiv8u(v *Value) bool { 1021 v_1 := v.Args[1] 1022 v_0 := v.Args[0] 1023 b := v.Block 1024 typ := &b.Func.Config.Types 1025 // match: (Div8u x y) 1026 // result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y))) 1027 for { 1028 x := v_0 1029 y := v_1 1030 v.reset(OpSelect1) 1031 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 1032 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1033 v1.AddArg(x) 1034 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1035 v2.AddArg(y) 1036 v0.AddArg2(v1, v2) 1037 v.AddArg(v0) 1038 return true 1039 } 1040 } 1041 func rewriteValueMIPS_OpEq16(v *Value) bool { 1042 v_1 := v.Args[1] 1043 v_0 := v.Args[0] 1044 b := v.Block 1045 typ := &b.Func.Config.Types 1046 // match: (Eq16 x y) 1047 // result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y))) 1048 for { 1049 x := v_0 1050 y := v_1 1051 v.reset(OpMIPSSGTUconst) 1052 v.AuxInt = int32ToAuxInt(1) 1053 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1054 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1055 v1.AddArg(x) 1056 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1057 v2.AddArg(y) 1058 v0.AddArg2(v1, v2) 1059 v.AddArg(v0) 1060 return true 1061 } 1062 } 1063 func rewriteValueMIPS_OpEq32(v *Value) bool { 1064 v_1 := v.Args[1] 1065 v_0 := v.Args[0] 1066 b := v.Block 1067 typ := &b.Func.Config.Types 1068 // match: (Eq32 x y) 1069 // result: (SGTUconst [1] (XOR x y)) 1070 for { 1071 x := v_0 1072 y := v_1 1073 v.reset(OpMIPSSGTUconst) 1074 v.AuxInt = int32ToAuxInt(1) 1075 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1076 v0.AddArg2(x, y) 1077 v.AddArg(v0) 1078 return true 1079 } 1080 } 1081 func rewriteValueMIPS_OpEq32F(v *Value) bool { 1082 v_1 := v.Args[1] 1083 v_0 := v.Args[0] 1084 b := v.Block 1085 // match: (Eq32F x y) 1086 // result: (FPFlagTrue (CMPEQF x y)) 1087 for { 1088 x := v_0 1089 y := v_1 1090 v.reset(OpMIPSFPFlagTrue) 1091 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags) 1092 v0.AddArg2(x, y) 1093 v.AddArg(v0) 1094 return true 1095 } 1096 } 1097 func rewriteValueMIPS_OpEq64F(v *Value) bool { 1098 v_1 := v.Args[1] 1099 v_0 := v.Args[0] 1100 b := v.Block 1101 // match: (Eq64F x y) 1102 // result: (FPFlagTrue (CMPEQD x y)) 1103 for { 1104 x := v_0 1105 y := v_1 1106 v.reset(OpMIPSFPFlagTrue) 1107 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags) 1108 v0.AddArg2(x, y) 1109 v.AddArg(v0) 1110 return true 1111 } 1112 } 1113 func rewriteValueMIPS_OpEq8(v *Value) bool { 1114 v_1 := v.Args[1] 1115 v_0 := v.Args[0] 1116 b := v.Block 1117 typ := &b.Func.Config.Types 1118 // match: (Eq8 x y) 1119 // result: (SGTUconst [1] (XOR (ZeroExt8to32 x) (ZeroExt8to32 y))) 1120 for { 1121 x := v_0 1122 y := v_1 1123 v.reset(OpMIPSSGTUconst) 1124 v.AuxInt = int32ToAuxInt(1) 1125 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1126 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1127 v1.AddArg(x) 1128 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1129 v2.AddArg(y) 1130 v0.AddArg2(v1, v2) 1131 v.AddArg(v0) 1132 return true 1133 } 1134 } 1135 func rewriteValueMIPS_OpEqB(v *Value) bool { 1136 v_1 := v.Args[1] 1137 v_0 := v.Args[0] 1138 b := v.Block 1139 typ := &b.Func.Config.Types 1140 // match: (EqB x y) 1141 // result: (XORconst [1] (XOR <typ.Bool> x y)) 1142 for { 1143 x := v_0 1144 y := v_1 1145 v.reset(OpMIPSXORconst) 1146 v.AuxInt = int32ToAuxInt(1) 1147 v0 := 
b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool) 1148 v0.AddArg2(x, y) 1149 v.AddArg(v0) 1150 return true 1151 } 1152 } 1153 func rewriteValueMIPS_OpEqPtr(v *Value) bool { 1154 v_1 := v.Args[1] 1155 v_0 := v.Args[0] 1156 b := v.Block 1157 typ := &b.Func.Config.Types 1158 // match: (EqPtr x y) 1159 // result: (SGTUconst [1] (XOR x y)) 1160 for { 1161 x := v_0 1162 y := v_1 1163 v.reset(OpMIPSSGTUconst) 1164 v.AuxInt = int32ToAuxInt(1) 1165 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1166 v0.AddArg2(x, y) 1167 v.AddArg(v0) 1168 return true 1169 } 1170 } 1171 func rewriteValueMIPS_OpHmul32(v *Value) bool { 1172 v_1 := v.Args[1] 1173 v_0 := v.Args[0] 1174 b := v.Block 1175 typ := &b.Func.Config.Types 1176 // match: (Hmul32 x y) 1177 // result: (Select0 (MULT x y)) 1178 for { 1179 x := v_0 1180 y := v_1 1181 v.reset(OpSelect0) 1182 v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32)) 1183 v0.AddArg2(x, y) 1184 v.AddArg(v0) 1185 return true 1186 } 1187 } 1188 func rewriteValueMIPS_OpHmul32u(v *Value) bool { 1189 v_1 := v.Args[1] 1190 v_0 := v.Args[0] 1191 b := v.Block 1192 typ := &b.Func.Config.Types 1193 // match: (Hmul32u x y) 1194 // result: (Select0 (MULTU x y)) 1195 for { 1196 x := v_0 1197 y := v_1 1198 v.reset(OpSelect0) 1199 v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32)) 1200 v0.AddArg2(x, y) 1201 v.AddArg(v0) 1202 return true 1203 } 1204 } 1205 func rewriteValueMIPS_OpIsInBounds(v *Value) bool { 1206 v_1 := v.Args[1] 1207 v_0 := v.Args[0] 1208 // match: (IsInBounds idx len) 1209 // result: (SGTU len idx) 1210 for { 1211 idx := v_0 1212 len := v_1 1213 v.reset(OpMIPSSGTU) 1214 v.AddArg2(len, idx) 1215 return true 1216 } 1217 } 1218 func rewriteValueMIPS_OpIsNonNil(v *Value) bool { 1219 v_0 := v.Args[0] 1220 b := v.Block 1221 typ := &b.Func.Config.Types 1222 // match: (IsNonNil ptr) 1223 // result: (SGTU ptr (MOVWconst [0])) 1224 for { 1225 ptr := v_0 1226 v.reset(OpMIPSSGTU) 1227 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1228 v0.AuxInt = int32ToAuxInt(0) 1229 v.AddArg2(ptr, v0) 1230 return true 1231 } 1232 } 1233 func rewriteValueMIPS_OpIsSliceInBounds(v *Value) bool { 1234 v_1 := v.Args[1] 1235 v_0 := v.Args[0] 1236 b := v.Block 1237 typ := &b.Func.Config.Types 1238 // match: (IsSliceInBounds idx len) 1239 // result: (XORconst [1] (SGTU idx len)) 1240 for { 1241 idx := v_0 1242 len := v_1 1243 v.reset(OpMIPSXORconst) 1244 v.AuxInt = int32ToAuxInt(1) 1245 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1246 v0.AddArg2(idx, len) 1247 v.AddArg(v0) 1248 return true 1249 } 1250 } 1251 func rewriteValueMIPS_OpLeq16(v *Value) bool { 1252 v_1 := v.Args[1] 1253 v_0 := v.Args[0] 1254 b := v.Block 1255 typ := &b.Func.Config.Types 1256 // match: (Leq16 x y) 1257 // result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y))) 1258 for { 1259 x := v_0 1260 y := v_1 1261 v.reset(OpMIPSXORconst) 1262 v.AuxInt = int32ToAuxInt(1) 1263 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1264 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1265 v1.AddArg(x) 1266 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1267 v2.AddArg(y) 1268 v0.AddArg2(v1, v2) 1269 v.AddArg(v0) 1270 return true 1271 } 1272 } 1273 func rewriteValueMIPS_OpLeq16U(v *Value) bool { 1274 v_1 := v.Args[1] 1275 v_0 := v.Args[0] 1276 b := v.Block 1277 typ := &b.Func.Config.Types 1278 // match: (Leq16U x y) 1279 // result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))) 1280 for { 1281 x := v_0 1282 y := v_1 1283 v.reset(OpMIPSXORconst) 1284 v.AuxInt = 
int32ToAuxInt(1) 1285 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1286 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1287 v1.AddArg(x) 1288 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1289 v2.AddArg(y) 1290 v0.AddArg2(v1, v2) 1291 v.AddArg(v0) 1292 return true 1293 } 1294 } 1295 func rewriteValueMIPS_OpLeq32(v *Value) bool { 1296 v_1 := v.Args[1] 1297 v_0 := v.Args[0] 1298 b := v.Block 1299 typ := &b.Func.Config.Types 1300 // match: (Leq32 x y) 1301 // result: (XORconst [1] (SGT x y)) 1302 for { 1303 x := v_0 1304 y := v_1 1305 v.reset(OpMIPSXORconst) 1306 v.AuxInt = int32ToAuxInt(1) 1307 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1308 v0.AddArg2(x, y) 1309 v.AddArg(v0) 1310 return true 1311 } 1312 } 1313 func rewriteValueMIPS_OpLeq32F(v *Value) bool { 1314 v_1 := v.Args[1] 1315 v_0 := v.Args[0] 1316 b := v.Block 1317 // match: (Leq32F x y) 1318 // result: (FPFlagTrue (CMPGEF y x)) 1319 for { 1320 x := v_0 1321 y := v_1 1322 v.reset(OpMIPSFPFlagTrue) 1323 v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags) 1324 v0.AddArg2(y, x) 1325 v.AddArg(v0) 1326 return true 1327 } 1328 } 1329 func rewriteValueMIPS_OpLeq32U(v *Value) bool { 1330 v_1 := v.Args[1] 1331 v_0 := v.Args[0] 1332 b := v.Block 1333 typ := &b.Func.Config.Types 1334 // match: (Leq32U x y) 1335 // result: (XORconst [1] (SGTU x y)) 1336 for { 1337 x := v_0 1338 y := v_1 1339 v.reset(OpMIPSXORconst) 1340 v.AuxInt = int32ToAuxInt(1) 1341 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1342 v0.AddArg2(x, y) 1343 v.AddArg(v0) 1344 return true 1345 } 1346 } 1347 func rewriteValueMIPS_OpLeq64F(v *Value) bool { 1348 v_1 := v.Args[1] 1349 v_0 := v.Args[0] 1350 b := v.Block 1351 // match: (Leq64F x y) 1352 // result: (FPFlagTrue (CMPGED y x)) 1353 for { 1354 x := v_0 1355 y := v_1 1356 v.reset(OpMIPSFPFlagTrue) 1357 v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags) 1358 v0.AddArg2(y, x) 1359 v.AddArg(v0) 1360 return true 1361 } 1362 } 1363 func rewriteValueMIPS_OpLeq8(v *Value) bool { 1364 v_1 := v.Args[1] 1365 v_0 := v.Args[0] 1366 b := v.Block 1367 typ := &b.Func.Config.Types 1368 // match: (Leq8 x y) 1369 // result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y))) 1370 for { 1371 x := v_0 1372 y := v_1 1373 v.reset(OpMIPSXORconst) 1374 v.AuxInt = int32ToAuxInt(1) 1375 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1376 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1377 v1.AddArg(x) 1378 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1379 v2.AddArg(y) 1380 v0.AddArg2(v1, v2) 1381 v.AddArg(v0) 1382 return true 1383 } 1384 } 1385 func rewriteValueMIPS_OpLeq8U(v *Value) bool { 1386 v_1 := v.Args[1] 1387 v_0 := v.Args[0] 1388 b := v.Block 1389 typ := &b.Func.Config.Types 1390 // match: (Leq8U x y) 1391 // result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))) 1392 for { 1393 x := v_0 1394 y := v_1 1395 v.reset(OpMIPSXORconst) 1396 v.AuxInt = int32ToAuxInt(1) 1397 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1398 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1399 v1.AddArg(x) 1400 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1401 v2.AddArg(y) 1402 v0.AddArg2(v1, v2) 1403 v.AddArg(v0) 1404 return true 1405 } 1406 } 1407 func rewriteValueMIPS_OpLess16(v *Value) bool { 1408 v_1 := v.Args[1] 1409 v_0 := v.Args[0] 1410 b := v.Block 1411 typ := &b.Func.Config.Types 1412 // match: (Less16 x y) 1413 // result: (SGT (SignExt16to32 y) (SignExt16to32 x)) 1414 for { 1415 x := v_0 1416 y := v_1 1417 v.reset(OpMIPSSGT) 1418 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1419 
v0.AddArg(y) 1420 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1421 v1.AddArg(x) 1422 v.AddArg2(v0, v1) 1423 return true 1424 } 1425 } 1426 func rewriteValueMIPS_OpLess16U(v *Value) bool { 1427 v_1 := v.Args[1] 1428 v_0 := v.Args[0] 1429 b := v.Block 1430 typ := &b.Func.Config.Types 1431 // match: (Less16U x y) 1432 // result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)) 1433 for { 1434 x := v_0 1435 y := v_1 1436 v.reset(OpMIPSSGTU) 1437 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1438 v0.AddArg(y) 1439 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1440 v1.AddArg(x) 1441 v.AddArg2(v0, v1) 1442 return true 1443 } 1444 } 1445 func rewriteValueMIPS_OpLess32(v *Value) bool { 1446 v_1 := v.Args[1] 1447 v_0 := v.Args[0] 1448 // match: (Less32 x y) 1449 // result: (SGT y x) 1450 for { 1451 x := v_0 1452 y := v_1 1453 v.reset(OpMIPSSGT) 1454 v.AddArg2(y, x) 1455 return true 1456 } 1457 } 1458 func rewriteValueMIPS_OpLess32F(v *Value) bool { 1459 v_1 := v.Args[1] 1460 v_0 := v.Args[0] 1461 b := v.Block 1462 // match: (Less32F x y) 1463 // result: (FPFlagTrue (CMPGTF y x)) 1464 for { 1465 x := v_0 1466 y := v_1 1467 v.reset(OpMIPSFPFlagTrue) 1468 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags) 1469 v0.AddArg2(y, x) 1470 v.AddArg(v0) 1471 return true 1472 } 1473 } 1474 func rewriteValueMIPS_OpLess32U(v *Value) bool { 1475 v_1 := v.Args[1] 1476 v_0 := v.Args[0] 1477 // match: (Less32U x y) 1478 // result: (SGTU y x) 1479 for { 1480 x := v_0 1481 y := v_1 1482 v.reset(OpMIPSSGTU) 1483 v.AddArg2(y, x) 1484 return true 1485 } 1486 } 1487 func rewriteValueMIPS_OpLess64F(v *Value) bool { 1488 v_1 := v.Args[1] 1489 v_0 := v.Args[0] 1490 b := v.Block 1491 // match: (Less64F x y) 1492 // result: (FPFlagTrue (CMPGTD y x)) 1493 for { 1494 x := v_0 1495 y := v_1 1496 v.reset(OpMIPSFPFlagTrue) 1497 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags) 1498 v0.AddArg2(y, x) 1499 v.AddArg(v0) 1500 return true 1501 } 1502 } 1503 func rewriteValueMIPS_OpLess8(v *Value) bool { 1504 v_1 := v.Args[1] 1505 v_0 := v.Args[0] 1506 b := v.Block 1507 typ := &b.Func.Config.Types 1508 // match: (Less8 x y) 1509 // result: (SGT (SignExt8to32 y) (SignExt8to32 x)) 1510 for { 1511 x := v_0 1512 y := v_1 1513 v.reset(OpMIPSSGT) 1514 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1515 v0.AddArg(y) 1516 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1517 v1.AddArg(x) 1518 v.AddArg2(v0, v1) 1519 return true 1520 } 1521 } 1522 func rewriteValueMIPS_OpLess8U(v *Value) bool { 1523 v_1 := v.Args[1] 1524 v_0 := v.Args[0] 1525 b := v.Block 1526 typ := &b.Func.Config.Types 1527 // match: (Less8U x y) 1528 // result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)) 1529 for { 1530 x := v_0 1531 y := v_1 1532 v.reset(OpMIPSSGTU) 1533 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1534 v0.AddArg(y) 1535 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1536 v1.AddArg(x) 1537 v.AddArg2(v0, v1) 1538 return true 1539 } 1540 } 1541 func rewriteValueMIPS_OpLoad(v *Value) bool { 1542 v_1 := v.Args[1] 1543 v_0 := v.Args[0] 1544 // match: (Load <t> ptr mem) 1545 // cond: t.IsBoolean() 1546 // result: (MOVBUload ptr mem) 1547 for { 1548 t := v.Type 1549 ptr := v_0 1550 mem := v_1 1551 if !(t.IsBoolean()) { 1552 break 1553 } 1554 v.reset(OpMIPSMOVBUload) 1555 v.AddArg2(ptr, mem) 1556 return true 1557 } 1558 // match: (Load <t> ptr mem) 1559 // cond: (is8BitInt(t) && t.IsSigned()) 1560 // result: (MOVBload ptr mem) 1561 for { 1562 t := v.Type 1563 ptr := v_0 1564 mem := v_1 1565 if !(is8BitInt(t) && 
t.IsSigned()) { 1566 break 1567 } 1568 v.reset(OpMIPSMOVBload) 1569 v.AddArg2(ptr, mem) 1570 return true 1571 } 1572 // match: (Load <t> ptr mem) 1573 // cond: (is8BitInt(t) && !t.IsSigned()) 1574 // result: (MOVBUload ptr mem) 1575 for { 1576 t := v.Type 1577 ptr := v_0 1578 mem := v_1 1579 if !(is8BitInt(t) && !t.IsSigned()) { 1580 break 1581 } 1582 v.reset(OpMIPSMOVBUload) 1583 v.AddArg2(ptr, mem) 1584 return true 1585 } 1586 // match: (Load <t> ptr mem) 1587 // cond: (is16BitInt(t) && t.IsSigned()) 1588 // result: (MOVHload ptr mem) 1589 for { 1590 t := v.Type 1591 ptr := v_0 1592 mem := v_1 1593 if !(is16BitInt(t) && t.IsSigned()) { 1594 break 1595 } 1596 v.reset(OpMIPSMOVHload) 1597 v.AddArg2(ptr, mem) 1598 return true 1599 } 1600 // match: (Load <t> ptr mem) 1601 // cond: (is16BitInt(t) && !t.IsSigned()) 1602 // result: (MOVHUload ptr mem) 1603 for { 1604 t := v.Type 1605 ptr := v_0 1606 mem := v_1 1607 if !(is16BitInt(t) && !t.IsSigned()) { 1608 break 1609 } 1610 v.reset(OpMIPSMOVHUload) 1611 v.AddArg2(ptr, mem) 1612 return true 1613 } 1614 // match: (Load <t> ptr mem) 1615 // cond: (is32BitInt(t) || isPtr(t)) 1616 // result: (MOVWload ptr mem) 1617 for { 1618 t := v.Type 1619 ptr := v_0 1620 mem := v_1 1621 if !(is32BitInt(t) || isPtr(t)) { 1622 break 1623 } 1624 v.reset(OpMIPSMOVWload) 1625 v.AddArg2(ptr, mem) 1626 return true 1627 } 1628 // match: (Load <t> ptr mem) 1629 // cond: is32BitFloat(t) 1630 // result: (MOVFload ptr mem) 1631 for { 1632 t := v.Type 1633 ptr := v_0 1634 mem := v_1 1635 if !(is32BitFloat(t)) { 1636 break 1637 } 1638 v.reset(OpMIPSMOVFload) 1639 v.AddArg2(ptr, mem) 1640 return true 1641 } 1642 // match: (Load <t> ptr mem) 1643 // cond: is64BitFloat(t) 1644 // result: (MOVDload ptr mem) 1645 for { 1646 t := v.Type 1647 ptr := v_0 1648 mem := v_1 1649 if !(is64BitFloat(t)) { 1650 break 1651 } 1652 v.reset(OpMIPSMOVDload) 1653 v.AddArg2(ptr, mem) 1654 return true 1655 } 1656 return false 1657 } 1658 func rewriteValueMIPS_OpLocalAddr(v *Value) bool { 1659 v_1 := v.Args[1] 1660 v_0 := v.Args[0] 1661 b := v.Block 1662 typ := &b.Func.Config.Types 1663 // match: (LocalAddr <t> {sym} base mem) 1664 // cond: t.Elem().HasPointers() 1665 // result: (MOVWaddr {sym} (SPanchored base mem)) 1666 for { 1667 t := v.Type 1668 sym := auxToSym(v.Aux) 1669 base := v_0 1670 mem := v_1 1671 if !(t.Elem().HasPointers()) { 1672 break 1673 } 1674 v.reset(OpMIPSMOVWaddr) 1675 v.Aux = symToAux(sym) 1676 v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr) 1677 v0.AddArg2(base, mem) 1678 v.AddArg(v0) 1679 return true 1680 } 1681 // match: (LocalAddr <t> {sym} base _) 1682 // cond: !t.Elem().HasPointers() 1683 // result: (MOVWaddr {sym} base) 1684 for { 1685 t := v.Type 1686 sym := auxToSym(v.Aux) 1687 base := v_0 1688 if !(!t.Elem().HasPointers()) { 1689 break 1690 } 1691 v.reset(OpMIPSMOVWaddr) 1692 v.Aux = symToAux(sym) 1693 v.AddArg(base) 1694 return true 1695 } 1696 return false 1697 } 1698 func rewriteValueMIPS_OpLsh16x16(v *Value) bool { 1699 v_1 := v.Args[1] 1700 v_0 := v.Args[0] 1701 b := v.Block 1702 typ := &b.Func.Config.Types 1703 // match: (Lsh16x16 <t> x y) 1704 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 1705 for { 1706 t := v.Type 1707 x := v_0 1708 y := v_1 1709 v.reset(OpMIPSCMOVZ) 1710 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1711 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1712 v1.AddArg(y) 1713 v0.AddArg2(x, v1) 1714 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1715 v2.AuxInt = int32ToAuxInt(0) 
1716 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1717 v3.AuxInt = int32ToAuxInt(32) 1718 v3.AddArg(v1) 1719 v.AddArg3(v0, v2, v3) 1720 return true 1721 } 1722 } 1723 func rewriteValueMIPS_OpLsh16x32(v *Value) bool { 1724 v_1 := v.Args[1] 1725 v_0 := v.Args[0] 1726 b := v.Block 1727 typ := &b.Func.Config.Types 1728 // match: (Lsh16x32 <t> x y) 1729 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 1730 for { 1731 t := v.Type 1732 x := v_0 1733 y := v_1 1734 v.reset(OpMIPSCMOVZ) 1735 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1736 v0.AddArg2(x, y) 1737 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1738 v1.AuxInt = int32ToAuxInt(0) 1739 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1740 v2.AuxInt = int32ToAuxInt(32) 1741 v2.AddArg(y) 1742 v.AddArg3(v0, v1, v2) 1743 return true 1744 } 1745 } 1746 func rewriteValueMIPS_OpLsh16x64(v *Value) bool { 1747 v_1 := v.Args[1] 1748 v_0 := v.Args[0] 1749 // match: (Lsh16x64 x (Const64 [c])) 1750 // cond: uint32(c) < 16 1751 // result: (SLLconst x [int32(c)]) 1752 for { 1753 x := v_0 1754 if v_1.Op != OpConst64 { 1755 break 1756 } 1757 c := auxIntToInt64(v_1.AuxInt) 1758 if !(uint32(c) < 16) { 1759 break 1760 } 1761 v.reset(OpMIPSSLLconst) 1762 v.AuxInt = int32ToAuxInt(int32(c)) 1763 v.AddArg(x) 1764 return true 1765 } 1766 // match: (Lsh16x64 _ (Const64 [c])) 1767 // cond: uint32(c) >= 16 1768 // result: (MOVWconst [0]) 1769 for { 1770 if v_1.Op != OpConst64 { 1771 break 1772 } 1773 c := auxIntToInt64(v_1.AuxInt) 1774 if !(uint32(c) >= 16) { 1775 break 1776 } 1777 v.reset(OpMIPSMOVWconst) 1778 v.AuxInt = int32ToAuxInt(0) 1779 return true 1780 } 1781 return false 1782 } 1783 func rewriteValueMIPS_OpLsh16x8(v *Value) bool { 1784 v_1 := v.Args[1] 1785 v_0 := v.Args[0] 1786 b := v.Block 1787 typ := &b.Func.Config.Types 1788 // match: (Lsh16x8 <t> x y) 1789 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 1790 for { 1791 t := v.Type 1792 x := v_0 1793 y := v_1 1794 v.reset(OpMIPSCMOVZ) 1795 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1796 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1797 v1.AddArg(y) 1798 v0.AddArg2(x, v1) 1799 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1800 v2.AuxInt = int32ToAuxInt(0) 1801 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1802 v3.AuxInt = int32ToAuxInt(32) 1803 v3.AddArg(v1) 1804 v.AddArg3(v0, v2, v3) 1805 return true 1806 } 1807 } 1808 func rewriteValueMIPS_OpLsh32x16(v *Value) bool { 1809 v_1 := v.Args[1] 1810 v_0 := v.Args[0] 1811 b := v.Block 1812 typ := &b.Func.Config.Types 1813 // match: (Lsh32x16 <t> x y) 1814 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 1815 for { 1816 t := v.Type 1817 x := v_0 1818 y := v_1 1819 v.reset(OpMIPSCMOVZ) 1820 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1821 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1822 v1.AddArg(y) 1823 v0.AddArg2(x, v1) 1824 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1825 v2.AuxInt = int32ToAuxInt(0) 1826 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1827 v3.AuxInt = int32ToAuxInt(32) 1828 v3.AddArg(v1) 1829 v.AddArg3(v0, v2, v3) 1830 return true 1831 } 1832 } 1833 func rewriteValueMIPS_OpLsh32x32(v *Value) bool { 1834 v_1 := v.Args[1] 1835 v_0 := v.Args[0] 1836 b := v.Block 1837 typ := &b.Func.Config.Types 1838 // match: (Lsh32x32 <t> x y) 1839 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 1840 for { 1841 t := v.Type 1842 x := v_0 1843 y := v_1 1844 
v.reset(OpMIPSCMOVZ) 1845 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1846 v0.AddArg2(x, y) 1847 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1848 v1.AuxInt = int32ToAuxInt(0) 1849 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1850 v2.AuxInt = int32ToAuxInt(32) 1851 v2.AddArg(y) 1852 v.AddArg3(v0, v1, v2) 1853 return true 1854 } 1855 } 1856 func rewriteValueMIPS_OpLsh32x64(v *Value) bool { 1857 v_1 := v.Args[1] 1858 v_0 := v.Args[0] 1859 // match: (Lsh32x64 x (Const64 [c])) 1860 // cond: uint32(c) < 32 1861 // result: (SLLconst x [int32(c)]) 1862 for { 1863 x := v_0 1864 if v_1.Op != OpConst64 { 1865 break 1866 } 1867 c := auxIntToInt64(v_1.AuxInt) 1868 if !(uint32(c) < 32) { 1869 break 1870 } 1871 v.reset(OpMIPSSLLconst) 1872 v.AuxInt = int32ToAuxInt(int32(c)) 1873 v.AddArg(x) 1874 return true 1875 } 1876 // match: (Lsh32x64 _ (Const64 [c])) 1877 // cond: uint32(c) >= 32 1878 // result: (MOVWconst [0]) 1879 for { 1880 if v_1.Op != OpConst64 { 1881 break 1882 } 1883 c := auxIntToInt64(v_1.AuxInt) 1884 if !(uint32(c) >= 32) { 1885 break 1886 } 1887 v.reset(OpMIPSMOVWconst) 1888 v.AuxInt = int32ToAuxInt(0) 1889 return true 1890 } 1891 return false 1892 } 1893 func rewriteValueMIPS_OpLsh32x8(v *Value) bool { 1894 v_1 := v.Args[1] 1895 v_0 := v.Args[0] 1896 b := v.Block 1897 typ := &b.Func.Config.Types 1898 // match: (Lsh32x8 <t> x y) 1899 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 1900 for { 1901 t := v.Type 1902 x := v_0 1903 y := v_1 1904 v.reset(OpMIPSCMOVZ) 1905 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1906 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1907 v1.AddArg(y) 1908 v0.AddArg2(x, v1) 1909 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1910 v2.AuxInt = int32ToAuxInt(0) 1911 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1912 v3.AuxInt = int32ToAuxInt(32) 1913 v3.AddArg(v1) 1914 v.AddArg3(v0, v2, v3) 1915 return true 1916 } 1917 } 1918 func rewriteValueMIPS_OpLsh8x16(v *Value) bool { 1919 v_1 := v.Args[1] 1920 v_0 := v.Args[0] 1921 b := v.Block 1922 typ := &b.Func.Config.Types 1923 // match: (Lsh8x16 <t> x y) 1924 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 1925 for { 1926 t := v.Type 1927 x := v_0 1928 y := v_1 1929 v.reset(OpMIPSCMOVZ) 1930 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1931 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1932 v1.AddArg(y) 1933 v0.AddArg2(x, v1) 1934 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1935 v2.AuxInt = int32ToAuxInt(0) 1936 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1937 v3.AuxInt = int32ToAuxInt(32) 1938 v3.AddArg(v1) 1939 v.AddArg3(v0, v2, v3) 1940 return true 1941 } 1942 } 1943 func rewriteValueMIPS_OpLsh8x32(v *Value) bool { 1944 v_1 := v.Args[1] 1945 v_0 := v.Args[0] 1946 b := v.Block 1947 typ := &b.Func.Config.Types 1948 // match: (Lsh8x32 <t> x y) 1949 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 1950 for { 1951 t := v.Type 1952 x := v_0 1953 y := v_1 1954 v.reset(OpMIPSCMOVZ) 1955 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 1956 v0.AddArg2(x, y) 1957 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 1958 v1.AuxInt = int32ToAuxInt(0) 1959 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 1960 v2.AuxInt = int32ToAuxInt(32) 1961 v2.AddArg(y) 1962 v.AddArg3(v0, v1, v2) 1963 return true 1964 } 1965 } 1966 func rewriteValueMIPS_OpLsh8x64(v *Value) bool { 1967 v_1 := v.Args[1] 1968 v_0 := v.Args[0] 1969 // match: (Lsh8x64 x (Const64 [c])) 1970 // cond: 
uint32(c) < 8 1971 // result: (SLLconst x [int32(c)]) 1972 for { 1973 x := v_0 1974 if v_1.Op != OpConst64 { 1975 break 1976 } 1977 c := auxIntToInt64(v_1.AuxInt) 1978 if !(uint32(c) < 8) { 1979 break 1980 } 1981 v.reset(OpMIPSSLLconst) 1982 v.AuxInt = int32ToAuxInt(int32(c)) 1983 v.AddArg(x) 1984 return true 1985 } 1986 // match: (Lsh8x64 _ (Const64 [c])) 1987 // cond: uint32(c) >= 8 1988 // result: (MOVWconst [0]) 1989 for { 1990 if v_1.Op != OpConst64 { 1991 break 1992 } 1993 c := auxIntToInt64(v_1.AuxInt) 1994 if !(uint32(c) >= 8) { 1995 break 1996 } 1997 v.reset(OpMIPSMOVWconst) 1998 v.AuxInt = int32ToAuxInt(0) 1999 return true 2000 } 2001 return false 2002 } 2003 func rewriteValueMIPS_OpLsh8x8(v *Value) bool { 2004 v_1 := v.Args[1] 2005 v_0 := v.Args[0] 2006 b := v.Block 2007 typ := &b.Func.Config.Types 2008 // match: (Lsh8x8 <t> x y) 2009 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 2010 for { 2011 t := v.Type 2012 x := v_0 2013 y := v_1 2014 v.reset(OpMIPSCMOVZ) 2015 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2016 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2017 v1.AddArg(y) 2018 v0.AddArg2(x, v1) 2019 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2020 v2.AuxInt = int32ToAuxInt(0) 2021 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2022 v3.AuxInt = int32ToAuxInt(32) 2023 v3.AddArg(v1) 2024 v.AddArg3(v0, v2, v3) 2025 return true 2026 } 2027 } 2028 func rewriteValueMIPS_OpMIPSADD(v *Value) bool { 2029 v_1 := v.Args[1] 2030 v_0 := v.Args[0] 2031 // match: (ADD x (MOVWconst <t> [c])) 2032 // cond: !t.IsPtr() 2033 // result: (ADDconst [c] x) 2034 for { 2035 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2036 x := v_0 2037 if v_1.Op != OpMIPSMOVWconst { 2038 continue 2039 } 2040 t := v_1.Type 2041 c := auxIntToInt32(v_1.AuxInt) 2042 if !(!t.IsPtr()) { 2043 continue 2044 } 2045 v.reset(OpMIPSADDconst) 2046 v.AuxInt = int32ToAuxInt(c) 2047 v.AddArg(x) 2048 return true 2049 } 2050 break 2051 } 2052 // match: (ADD x (NEG y)) 2053 // result: (SUB x y) 2054 for { 2055 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2056 x := v_0 2057 if v_1.Op != OpMIPSNEG { 2058 continue 2059 } 2060 y := v_1.Args[0] 2061 v.reset(OpMIPSSUB) 2062 v.AddArg2(x, y) 2063 return true 2064 } 2065 break 2066 } 2067 return false 2068 } 2069 func rewriteValueMIPS_OpMIPSADDconst(v *Value) bool { 2070 v_0 := v.Args[0] 2071 // match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr)) 2072 // result: (MOVWaddr [off1+off2] {sym} ptr) 2073 for { 2074 off1 := auxIntToInt32(v.AuxInt) 2075 if v_0.Op != OpMIPSMOVWaddr { 2076 break 2077 } 2078 off2 := auxIntToInt32(v_0.AuxInt) 2079 sym := auxToSym(v_0.Aux) 2080 ptr := v_0.Args[0] 2081 v.reset(OpMIPSMOVWaddr) 2082 v.AuxInt = int32ToAuxInt(off1 + off2) 2083 v.Aux = symToAux(sym) 2084 v.AddArg(ptr) 2085 return true 2086 } 2087 // match: (ADDconst [0] x) 2088 // result: x 2089 for { 2090 if auxIntToInt32(v.AuxInt) != 0 { 2091 break 2092 } 2093 x := v_0 2094 v.copyOf(x) 2095 return true 2096 } 2097 // match: (ADDconst [c] (MOVWconst [d])) 2098 // result: (MOVWconst [int32(c+d)]) 2099 for { 2100 c := auxIntToInt32(v.AuxInt) 2101 if v_0.Op != OpMIPSMOVWconst { 2102 break 2103 } 2104 d := auxIntToInt32(v_0.AuxInt) 2105 v.reset(OpMIPSMOVWconst) 2106 v.AuxInt = int32ToAuxInt(int32(c + d)) 2107 return true 2108 } 2109 // match: (ADDconst [c] (ADDconst [d] x)) 2110 // result: (ADDconst [c+d] x) 2111 for { 2112 c := auxIntToInt32(v.AuxInt) 2113 if v_0.Op != OpMIPSADDconst { 2114 break 2115 } 2116 d := 
auxIntToInt32(v_0.AuxInt) 2117 x := v_0.Args[0] 2118 v.reset(OpMIPSADDconst) 2119 v.AuxInt = int32ToAuxInt(c + d) 2120 v.AddArg(x) 2121 return true 2122 } 2123 // match: (ADDconst [c] (SUBconst [d] x)) 2124 // result: (ADDconst [c-d] x) 2125 for { 2126 c := auxIntToInt32(v.AuxInt) 2127 if v_0.Op != OpMIPSSUBconst { 2128 break 2129 } 2130 d := auxIntToInt32(v_0.AuxInt) 2131 x := v_0.Args[0] 2132 v.reset(OpMIPSADDconst) 2133 v.AuxInt = int32ToAuxInt(c - d) 2134 v.AddArg(x) 2135 return true 2136 } 2137 return false 2138 } 2139 func rewriteValueMIPS_OpMIPSAND(v *Value) bool { 2140 v_1 := v.Args[1] 2141 v_0 := v.Args[0] 2142 b := v.Block 2143 // match: (AND x (MOVWconst [c])) 2144 // result: (ANDconst [c] x) 2145 for { 2146 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2147 x := v_0 2148 if v_1.Op != OpMIPSMOVWconst { 2149 continue 2150 } 2151 c := auxIntToInt32(v_1.AuxInt) 2152 v.reset(OpMIPSANDconst) 2153 v.AuxInt = int32ToAuxInt(c) 2154 v.AddArg(x) 2155 return true 2156 } 2157 break 2158 } 2159 // match: (AND x x) 2160 // result: x 2161 for { 2162 x := v_0 2163 if x != v_1 { 2164 break 2165 } 2166 v.copyOf(x) 2167 return true 2168 } 2169 // match: (AND (SGTUconst [1] x) (SGTUconst [1] y)) 2170 // result: (SGTUconst [1] (OR <x.Type> x y)) 2171 for { 2172 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 2173 if v_0.Op != OpMIPSSGTUconst || auxIntToInt32(v_0.AuxInt) != 1 { 2174 continue 2175 } 2176 x := v_0.Args[0] 2177 if v_1.Op != OpMIPSSGTUconst || auxIntToInt32(v_1.AuxInt) != 1 { 2178 continue 2179 } 2180 y := v_1.Args[0] 2181 v.reset(OpMIPSSGTUconst) 2182 v.AuxInt = int32ToAuxInt(1) 2183 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 2184 v0.AddArg2(x, y) 2185 v.AddArg(v0) 2186 return true 2187 } 2188 break 2189 } 2190 return false 2191 } 2192 func rewriteValueMIPS_OpMIPSANDconst(v *Value) bool { 2193 v_0 := v.Args[0] 2194 // match: (ANDconst [0] _) 2195 // result: (MOVWconst [0]) 2196 for { 2197 if auxIntToInt32(v.AuxInt) != 0 { 2198 break 2199 } 2200 v.reset(OpMIPSMOVWconst) 2201 v.AuxInt = int32ToAuxInt(0) 2202 return true 2203 } 2204 // match: (ANDconst [-1] x) 2205 // result: x 2206 for { 2207 if auxIntToInt32(v.AuxInt) != -1 { 2208 break 2209 } 2210 x := v_0 2211 v.copyOf(x) 2212 return true 2213 } 2214 // match: (ANDconst [c] (MOVWconst [d])) 2215 // result: (MOVWconst [c&d]) 2216 for { 2217 c := auxIntToInt32(v.AuxInt) 2218 if v_0.Op != OpMIPSMOVWconst { 2219 break 2220 } 2221 d := auxIntToInt32(v_0.AuxInt) 2222 v.reset(OpMIPSMOVWconst) 2223 v.AuxInt = int32ToAuxInt(c & d) 2224 return true 2225 } 2226 // match: (ANDconst [c] (ANDconst [d] x)) 2227 // result: (ANDconst [c&d] x) 2228 for { 2229 c := auxIntToInt32(v.AuxInt) 2230 if v_0.Op != OpMIPSANDconst { 2231 break 2232 } 2233 d := auxIntToInt32(v_0.AuxInt) 2234 x := v_0.Args[0] 2235 v.reset(OpMIPSANDconst) 2236 v.AuxInt = int32ToAuxInt(c & d) 2237 v.AddArg(x) 2238 return true 2239 } 2240 return false 2241 } 2242 func rewriteValueMIPS_OpMIPSCMOVZ(v *Value) bool { 2243 v_2 := v.Args[2] 2244 v_1 := v.Args[1] 2245 v_0 := v.Args[0] 2246 // match: (CMOVZ _ f (MOVWconst [0])) 2247 // result: f 2248 for { 2249 f := v_1 2250 if v_2.Op != OpMIPSMOVWconst || auxIntToInt32(v_2.AuxInt) != 0 { 2251 break 2252 } 2253 v.copyOf(f) 2254 return true 2255 } 2256 // match: (CMOVZ a _ (MOVWconst [c])) 2257 // cond: c!=0 2258 // result: a 2259 for { 2260 a := v_0 2261 if v_2.Op != OpMIPSMOVWconst { 2262 break 2263 } 2264 c := auxIntToInt32(v_2.AuxInt) 2265 if !(c != 0) { 2266 break 2267 } 2268 v.copyOf(a) 2269 return true 2270 } 2271 
// match: (CMOVZ a (MOVWconst [0]) c) 2272 // result: (CMOVZzero a c) 2273 for { 2274 a := v_0 2275 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 2276 break 2277 } 2278 c := v_2 2279 v.reset(OpMIPSCMOVZzero) 2280 v.AddArg2(a, c) 2281 return true 2282 } 2283 return false 2284 } 2285 func rewriteValueMIPS_OpMIPSCMOVZzero(v *Value) bool { 2286 v_1 := v.Args[1] 2287 v_0 := v.Args[0] 2288 // match: (CMOVZzero _ (MOVWconst [0])) 2289 // result: (MOVWconst [0]) 2290 for { 2291 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 2292 break 2293 } 2294 v.reset(OpMIPSMOVWconst) 2295 v.AuxInt = int32ToAuxInt(0) 2296 return true 2297 } 2298 // match: (CMOVZzero a (MOVWconst [c])) 2299 // cond: c!=0 2300 // result: a 2301 for { 2302 a := v_0 2303 if v_1.Op != OpMIPSMOVWconst { 2304 break 2305 } 2306 c := auxIntToInt32(v_1.AuxInt) 2307 if !(c != 0) { 2308 break 2309 } 2310 v.copyOf(a) 2311 return true 2312 } 2313 return false 2314 } 2315 func rewriteValueMIPS_OpMIPSLoweredAtomicAdd(v *Value) bool { 2316 v_2 := v.Args[2] 2317 v_1 := v.Args[1] 2318 v_0 := v.Args[0] 2319 // match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem) 2320 // cond: is16Bit(int64(c)) 2321 // result: (LoweredAtomicAddconst [c] ptr mem) 2322 for { 2323 ptr := v_0 2324 if v_1.Op != OpMIPSMOVWconst { 2325 break 2326 } 2327 c := auxIntToInt32(v_1.AuxInt) 2328 mem := v_2 2329 if !(is16Bit(int64(c))) { 2330 break 2331 } 2332 v.reset(OpMIPSLoweredAtomicAddconst) 2333 v.AuxInt = int32ToAuxInt(c) 2334 v.AddArg2(ptr, mem) 2335 return true 2336 } 2337 return false 2338 } 2339 func rewriteValueMIPS_OpMIPSLoweredAtomicStore32(v *Value) bool { 2340 v_2 := v.Args[2] 2341 v_1 := v.Args[1] 2342 v_0 := v.Args[0] 2343 // match: (LoweredAtomicStore32 ptr (MOVWconst [0]) mem) 2344 // result: (LoweredAtomicStorezero ptr mem) 2345 for { 2346 ptr := v_0 2347 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 2348 break 2349 } 2350 mem := v_2 2351 v.reset(OpMIPSLoweredAtomicStorezero) 2352 v.AddArg2(ptr, mem) 2353 return true 2354 } 2355 return false 2356 } 2357 func rewriteValueMIPS_OpMIPSMOVBUload(v *Value) bool { 2358 v_1 := v.Args[1] 2359 v_0 := v.Args[0] 2360 // match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem) 2361 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2362 // result: (MOVBUload [off1+off2] {sym} ptr mem) 2363 for { 2364 off1 := auxIntToInt32(v.AuxInt) 2365 sym := auxToSym(v.Aux) 2366 x := v_0 2367 if x.Op != OpMIPSADDconst { 2368 break 2369 } 2370 off2 := auxIntToInt32(x.AuxInt) 2371 ptr := x.Args[0] 2372 mem := v_1 2373 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2374 break 2375 } 2376 v.reset(OpMIPSMOVBUload) 2377 v.AuxInt = int32ToAuxInt(off1 + off2) 2378 v.Aux = symToAux(sym) 2379 v.AddArg2(ptr, mem) 2380 return true 2381 } 2382 // match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 2383 // cond: canMergeSym(sym1,sym2) 2384 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2385 for { 2386 off1 := auxIntToInt32(v.AuxInt) 2387 sym1 := auxToSym(v.Aux) 2388 if v_0.Op != OpMIPSMOVWaddr { 2389 break 2390 } 2391 off2 := auxIntToInt32(v_0.AuxInt) 2392 sym2 := auxToSym(v_0.Aux) 2393 ptr := v_0.Args[0] 2394 mem := v_1 2395 if !(canMergeSym(sym1, sym2)) { 2396 break 2397 } 2398 v.reset(OpMIPSMOVBUload) 2399 v.AuxInt = int32ToAuxInt(off1 + off2) 2400 v.Aux = symToAux(mergeSym(sym1, sym2)) 2401 v.AddArg2(ptr, mem) 2402 return true 2403 } 2404 // match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) 2405 // cond: sym == sym2 && off == off2 && 
isSamePtr(ptr, ptr2) 2406 // result: (MOVBUreg x) 2407 for { 2408 off := auxIntToInt32(v.AuxInt) 2409 sym := auxToSym(v.Aux) 2410 ptr := v_0 2411 if v_1.Op != OpMIPSMOVBstore { 2412 break 2413 } 2414 off2 := auxIntToInt32(v_1.AuxInt) 2415 sym2 := auxToSym(v_1.Aux) 2416 x := v_1.Args[1] 2417 ptr2 := v_1.Args[0] 2418 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 2419 break 2420 } 2421 v.reset(OpMIPSMOVBUreg) 2422 v.AddArg(x) 2423 return true 2424 } 2425 return false 2426 } 2427 func rewriteValueMIPS_OpMIPSMOVBUreg(v *Value) bool { 2428 v_0 := v.Args[0] 2429 b := v.Block 2430 // match: (MOVBUreg x:(MOVBUload _ _)) 2431 // result: (MOVWreg x) 2432 for { 2433 x := v_0 2434 if x.Op != OpMIPSMOVBUload { 2435 break 2436 } 2437 v.reset(OpMIPSMOVWreg) 2438 v.AddArg(x) 2439 return true 2440 } 2441 // match: (MOVBUreg x:(MOVBUreg _)) 2442 // result: (MOVWreg x) 2443 for { 2444 x := v_0 2445 if x.Op != OpMIPSMOVBUreg { 2446 break 2447 } 2448 v.reset(OpMIPSMOVWreg) 2449 v.AddArg(x) 2450 return true 2451 } 2452 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem)) 2453 // cond: x.Uses == 1 && clobber(x) 2454 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem) 2455 for { 2456 t := v.Type 2457 x := v_0 2458 if x.Op != OpMIPSMOVBload { 2459 break 2460 } 2461 off := auxIntToInt32(x.AuxInt) 2462 sym := auxToSym(x.Aux) 2463 mem := x.Args[1] 2464 ptr := x.Args[0] 2465 if !(x.Uses == 1 && clobber(x)) { 2466 break 2467 } 2468 b = x.Block 2469 v0 := b.NewValue0(x.Pos, OpMIPSMOVBUload, t) 2470 v.copyOf(v0) 2471 v0.AuxInt = int32ToAuxInt(off) 2472 v0.Aux = symToAux(sym) 2473 v0.AddArg2(ptr, mem) 2474 return true 2475 } 2476 // match: (MOVBUreg (ANDconst [c] x)) 2477 // result: (ANDconst [c&0xff] x) 2478 for { 2479 if v_0.Op != OpMIPSANDconst { 2480 break 2481 } 2482 c := auxIntToInt32(v_0.AuxInt) 2483 x := v_0.Args[0] 2484 v.reset(OpMIPSANDconst) 2485 v.AuxInt = int32ToAuxInt(c & 0xff) 2486 v.AddArg(x) 2487 return true 2488 } 2489 // match: (MOVBUreg (MOVWconst [c])) 2490 // result: (MOVWconst [int32(uint8(c))]) 2491 for { 2492 if v_0.Op != OpMIPSMOVWconst { 2493 break 2494 } 2495 c := auxIntToInt32(v_0.AuxInt) 2496 v.reset(OpMIPSMOVWconst) 2497 v.AuxInt = int32ToAuxInt(int32(uint8(c))) 2498 return true 2499 } 2500 return false 2501 } 2502 func rewriteValueMIPS_OpMIPSMOVBload(v *Value) bool { 2503 v_1 := v.Args[1] 2504 v_0 := v.Args[0] 2505 // match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem) 2506 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2507 // result: (MOVBload [off1+off2] {sym} ptr mem) 2508 for { 2509 off1 := auxIntToInt32(v.AuxInt) 2510 sym := auxToSym(v.Aux) 2511 x := v_0 2512 if x.Op != OpMIPSADDconst { 2513 break 2514 } 2515 off2 := auxIntToInt32(x.AuxInt) 2516 ptr := x.Args[0] 2517 mem := v_1 2518 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2519 break 2520 } 2521 v.reset(OpMIPSMOVBload) 2522 v.AuxInt = int32ToAuxInt(off1 + off2) 2523 v.Aux = symToAux(sym) 2524 v.AddArg2(ptr, mem) 2525 return true 2526 } 2527 // match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 2528 // cond: canMergeSym(sym1,sym2) 2529 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2530 for { 2531 off1 := auxIntToInt32(v.AuxInt) 2532 sym1 := auxToSym(v.Aux) 2533 if v_0.Op != OpMIPSMOVWaddr { 2534 break 2535 } 2536 off2 := auxIntToInt32(v_0.AuxInt) 2537 sym2 := auxToSym(v_0.Aux) 2538 ptr := v_0.Args[0] 2539 mem := v_1 2540 if !(canMergeSym(sym1, sym2)) { 2541 break 2542 } 2543 v.reset(OpMIPSMOVBload) 2544 v.AuxInt = int32ToAuxInt(off1 + off2) 2545 v.Aux = 
symToAux(mergeSym(sym1, sym2)) 2546 v.AddArg2(ptr, mem) 2547 return true 2548 } 2549 // match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) 2550 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 2551 // result: (MOVBreg x) 2552 for { 2553 off := auxIntToInt32(v.AuxInt) 2554 sym := auxToSym(v.Aux) 2555 ptr := v_0 2556 if v_1.Op != OpMIPSMOVBstore { 2557 break 2558 } 2559 off2 := auxIntToInt32(v_1.AuxInt) 2560 sym2 := auxToSym(v_1.Aux) 2561 x := v_1.Args[1] 2562 ptr2 := v_1.Args[0] 2563 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 2564 break 2565 } 2566 v.reset(OpMIPSMOVBreg) 2567 v.AddArg(x) 2568 return true 2569 } 2570 return false 2571 } 2572 func rewriteValueMIPS_OpMIPSMOVBreg(v *Value) bool { 2573 v_0 := v.Args[0] 2574 b := v.Block 2575 // match: (MOVBreg x:(MOVBload _ _)) 2576 // result: (MOVWreg x) 2577 for { 2578 x := v_0 2579 if x.Op != OpMIPSMOVBload { 2580 break 2581 } 2582 v.reset(OpMIPSMOVWreg) 2583 v.AddArg(x) 2584 return true 2585 } 2586 // match: (MOVBreg x:(MOVBreg _)) 2587 // result: (MOVWreg x) 2588 for { 2589 x := v_0 2590 if x.Op != OpMIPSMOVBreg { 2591 break 2592 } 2593 v.reset(OpMIPSMOVWreg) 2594 v.AddArg(x) 2595 return true 2596 } 2597 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem)) 2598 // cond: x.Uses == 1 && clobber(x) 2599 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem) 2600 for { 2601 t := v.Type 2602 x := v_0 2603 if x.Op != OpMIPSMOVBUload { 2604 break 2605 } 2606 off := auxIntToInt32(x.AuxInt) 2607 sym := auxToSym(x.Aux) 2608 mem := x.Args[1] 2609 ptr := x.Args[0] 2610 if !(x.Uses == 1 && clobber(x)) { 2611 break 2612 } 2613 b = x.Block 2614 v0 := b.NewValue0(x.Pos, OpMIPSMOVBload, t) 2615 v.copyOf(v0) 2616 v0.AuxInt = int32ToAuxInt(off) 2617 v0.Aux = symToAux(sym) 2618 v0.AddArg2(ptr, mem) 2619 return true 2620 } 2621 // match: (MOVBreg (ANDconst [c] x)) 2622 // cond: c & 0x80 == 0 2623 // result: (ANDconst [c&0x7f] x) 2624 for { 2625 if v_0.Op != OpMIPSANDconst { 2626 break 2627 } 2628 c := auxIntToInt32(v_0.AuxInt) 2629 x := v_0.Args[0] 2630 if !(c&0x80 == 0) { 2631 break 2632 } 2633 v.reset(OpMIPSANDconst) 2634 v.AuxInt = int32ToAuxInt(c & 0x7f) 2635 v.AddArg(x) 2636 return true 2637 } 2638 // match: (MOVBreg (MOVWconst [c])) 2639 // result: (MOVWconst [int32(int8(c))]) 2640 for { 2641 if v_0.Op != OpMIPSMOVWconst { 2642 break 2643 } 2644 c := auxIntToInt32(v_0.AuxInt) 2645 v.reset(OpMIPSMOVWconst) 2646 v.AuxInt = int32ToAuxInt(int32(int8(c))) 2647 return true 2648 } 2649 return false 2650 } 2651 func rewriteValueMIPS_OpMIPSMOVBstore(v *Value) bool { 2652 v_2 := v.Args[2] 2653 v_1 := v.Args[1] 2654 v_0 := v.Args[0] 2655 // match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 2656 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2657 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 2658 for { 2659 off1 := auxIntToInt32(v.AuxInt) 2660 sym := auxToSym(v.Aux) 2661 x := v_0 2662 if x.Op != OpMIPSADDconst { 2663 break 2664 } 2665 off2 := auxIntToInt32(x.AuxInt) 2666 ptr := x.Args[0] 2667 val := v_1 2668 mem := v_2 2669 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2670 break 2671 } 2672 v.reset(OpMIPSMOVBstore) 2673 v.AuxInt = int32ToAuxInt(off1 + off2) 2674 v.Aux = symToAux(sym) 2675 v.AddArg3(ptr, val, mem) 2676 return true 2677 } 2678 // match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 2679 // cond: canMergeSym(sym1,sym2) 2680 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2681 for { 2682 off1 := auxIntToInt32(v.AuxInt) 2683 sym1 := 
auxToSym(v.Aux) 2684 if v_0.Op != OpMIPSMOVWaddr { 2685 break 2686 } 2687 off2 := auxIntToInt32(v_0.AuxInt) 2688 sym2 := auxToSym(v_0.Aux) 2689 ptr := v_0.Args[0] 2690 val := v_1 2691 mem := v_2 2692 if !(canMergeSym(sym1, sym2)) { 2693 break 2694 } 2695 v.reset(OpMIPSMOVBstore) 2696 v.AuxInt = int32ToAuxInt(off1 + off2) 2697 v.Aux = symToAux(mergeSym(sym1, sym2)) 2698 v.AddArg3(ptr, val, mem) 2699 return true 2700 } 2701 // match: (MOVBstore [off] {sym} ptr (MOVWconst [0]) mem) 2702 // result: (MOVBstorezero [off] {sym} ptr mem) 2703 for { 2704 off := auxIntToInt32(v.AuxInt) 2705 sym := auxToSym(v.Aux) 2706 ptr := v_0 2707 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 2708 break 2709 } 2710 mem := v_2 2711 v.reset(OpMIPSMOVBstorezero) 2712 v.AuxInt = int32ToAuxInt(off) 2713 v.Aux = symToAux(sym) 2714 v.AddArg2(ptr, mem) 2715 return true 2716 } 2717 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 2718 // result: (MOVBstore [off] {sym} ptr x mem) 2719 for { 2720 off := auxIntToInt32(v.AuxInt) 2721 sym := auxToSym(v.Aux) 2722 ptr := v_0 2723 if v_1.Op != OpMIPSMOVBreg { 2724 break 2725 } 2726 x := v_1.Args[0] 2727 mem := v_2 2728 v.reset(OpMIPSMOVBstore) 2729 v.AuxInt = int32ToAuxInt(off) 2730 v.Aux = symToAux(sym) 2731 v.AddArg3(ptr, x, mem) 2732 return true 2733 } 2734 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 2735 // result: (MOVBstore [off] {sym} ptr x mem) 2736 for { 2737 off := auxIntToInt32(v.AuxInt) 2738 sym := auxToSym(v.Aux) 2739 ptr := v_0 2740 if v_1.Op != OpMIPSMOVBUreg { 2741 break 2742 } 2743 x := v_1.Args[0] 2744 mem := v_2 2745 v.reset(OpMIPSMOVBstore) 2746 v.AuxInt = int32ToAuxInt(off) 2747 v.Aux = symToAux(sym) 2748 v.AddArg3(ptr, x, mem) 2749 return true 2750 } 2751 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 2752 // result: (MOVBstore [off] {sym} ptr x mem) 2753 for { 2754 off := auxIntToInt32(v.AuxInt) 2755 sym := auxToSym(v.Aux) 2756 ptr := v_0 2757 if v_1.Op != OpMIPSMOVHreg { 2758 break 2759 } 2760 x := v_1.Args[0] 2761 mem := v_2 2762 v.reset(OpMIPSMOVBstore) 2763 v.AuxInt = int32ToAuxInt(off) 2764 v.Aux = symToAux(sym) 2765 v.AddArg3(ptr, x, mem) 2766 return true 2767 } 2768 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 2769 // result: (MOVBstore [off] {sym} ptr x mem) 2770 for { 2771 off := auxIntToInt32(v.AuxInt) 2772 sym := auxToSym(v.Aux) 2773 ptr := v_0 2774 if v_1.Op != OpMIPSMOVHUreg { 2775 break 2776 } 2777 x := v_1.Args[0] 2778 mem := v_2 2779 v.reset(OpMIPSMOVBstore) 2780 v.AuxInt = int32ToAuxInt(off) 2781 v.Aux = symToAux(sym) 2782 v.AddArg3(ptr, x, mem) 2783 return true 2784 } 2785 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 2786 // result: (MOVBstore [off] {sym} ptr x mem) 2787 for { 2788 off := auxIntToInt32(v.AuxInt) 2789 sym := auxToSym(v.Aux) 2790 ptr := v_0 2791 if v_1.Op != OpMIPSMOVWreg { 2792 break 2793 } 2794 x := v_1.Args[0] 2795 mem := v_2 2796 v.reset(OpMIPSMOVBstore) 2797 v.AuxInt = int32ToAuxInt(off) 2798 v.Aux = symToAux(sym) 2799 v.AddArg3(ptr, x, mem) 2800 return true 2801 } 2802 return false 2803 } 2804 func rewriteValueMIPS_OpMIPSMOVBstorezero(v *Value) bool { 2805 v_1 := v.Args[1] 2806 v_0 := v.Args[0] 2807 // match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem) 2808 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2809 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 2810 for { 2811 off1 := auxIntToInt32(v.AuxInt) 2812 sym := auxToSym(v.Aux) 2813 x := v_0 2814 if x.Op != OpMIPSADDconst { 2815 break 2816 } 2817 off2 := auxIntToInt32(x.AuxInt) 2818 
ptr := x.Args[0] 2819 mem := v_1 2820 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2821 break 2822 } 2823 v.reset(OpMIPSMOVBstorezero) 2824 v.AuxInt = int32ToAuxInt(off1 + off2) 2825 v.Aux = symToAux(sym) 2826 v.AddArg2(ptr, mem) 2827 return true 2828 } 2829 // match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 2830 // cond: canMergeSym(sym1,sym2) 2831 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2832 for { 2833 off1 := auxIntToInt32(v.AuxInt) 2834 sym1 := auxToSym(v.Aux) 2835 if v_0.Op != OpMIPSMOVWaddr { 2836 break 2837 } 2838 off2 := auxIntToInt32(v_0.AuxInt) 2839 sym2 := auxToSym(v_0.Aux) 2840 ptr := v_0.Args[0] 2841 mem := v_1 2842 if !(canMergeSym(sym1, sym2)) { 2843 break 2844 } 2845 v.reset(OpMIPSMOVBstorezero) 2846 v.AuxInt = int32ToAuxInt(off1 + off2) 2847 v.Aux = symToAux(mergeSym(sym1, sym2)) 2848 v.AddArg2(ptr, mem) 2849 return true 2850 } 2851 return false 2852 } 2853 func rewriteValueMIPS_OpMIPSMOVDload(v *Value) bool { 2854 v_1 := v.Args[1] 2855 v_0 := v.Args[0] 2856 // match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem) 2857 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2858 // result: (MOVDload [off1+off2] {sym} ptr mem) 2859 for { 2860 off1 := auxIntToInt32(v.AuxInt) 2861 sym := auxToSym(v.Aux) 2862 x := v_0 2863 if x.Op != OpMIPSADDconst { 2864 break 2865 } 2866 off2 := auxIntToInt32(x.AuxInt) 2867 ptr := x.Args[0] 2868 mem := v_1 2869 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2870 break 2871 } 2872 v.reset(OpMIPSMOVDload) 2873 v.AuxInt = int32ToAuxInt(off1 + off2) 2874 v.Aux = symToAux(sym) 2875 v.AddArg2(ptr, mem) 2876 return true 2877 } 2878 // match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 2879 // cond: canMergeSym(sym1,sym2) 2880 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 2881 for { 2882 off1 := auxIntToInt32(v.AuxInt) 2883 sym1 := auxToSym(v.Aux) 2884 if v_0.Op != OpMIPSMOVWaddr { 2885 break 2886 } 2887 off2 := auxIntToInt32(v_0.AuxInt) 2888 sym2 := auxToSym(v_0.Aux) 2889 ptr := v_0.Args[0] 2890 mem := v_1 2891 if !(canMergeSym(sym1, sym2)) { 2892 break 2893 } 2894 v.reset(OpMIPSMOVDload) 2895 v.AuxInt = int32ToAuxInt(off1 + off2) 2896 v.Aux = symToAux(mergeSym(sym1, sym2)) 2897 v.AddArg2(ptr, mem) 2898 return true 2899 } 2900 // match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _)) 2901 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 2902 // result: x 2903 for { 2904 off := auxIntToInt32(v.AuxInt) 2905 sym := auxToSym(v.Aux) 2906 ptr := v_0 2907 if v_1.Op != OpMIPSMOVDstore { 2908 break 2909 } 2910 off2 := auxIntToInt32(v_1.AuxInt) 2911 sym2 := auxToSym(v_1.Aux) 2912 x := v_1.Args[1] 2913 ptr2 := v_1.Args[0] 2914 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 2915 break 2916 } 2917 v.copyOf(x) 2918 return true 2919 } 2920 return false 2921 } 2922 func rewriteValueMIPS_OpMIPSMOVDstore(v *Value) bool { 2923 v_2 := v.Args[2] 2924 v_1 := v.Args[1] 2925 v_0 := v.Args[0] 2926 // match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 2927 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2928 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 2929 for { 2930 off1 := auxIntToInt32(v.AuxInt) 2931 sym := auxToSym(v.Aux) 2932 x := v_0 2933 if x.Op != OpMIPSADDconst { 2934 break 2935 } 2936 off2 := auxIntToInt32(x.AuxInt) 2937 ptr := x.Args[0] 2938 val := v_1 2939 mem := v_2 2940 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 2941 break 2942 } 2943 v.reset(OpMIPSMOVDstore) 2944 v.AuxInt = int32ToAuxInt(off1 + off2) 
2945 v.Aux = symToAux(sym) 2946 v.AddArg3(ptr, val, mem) 2947 return true 2948 } 2949 // match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 2950 // cond: canMergeSym(sym1,sym2) 2951 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 2952 for { 2953 off1 := auxIntToInt32(v.AuxInt) 2954 sym1 := auxToSym(v.Aux) 2955 if v_0.Op != OpMIPSMOVWaddr { 2956 break 2957 } 2958 off2 := auxIntToInt32(v_0.AuxInt) 2959 sym2 := auxToSym(v_0.Aux) 2960 ptr := v_0.Args[0] 2961 val := v_1 2962 mem := v_2 2963 if !(canMergeSym(sym1, sym2)) { 2964 break 2965 } 2966 v.reset(OpMIPSMOVDstore) 2967 v.AuxInt = int32ToAuxInt(off1 + off2) 2968 v.Aux = symToAux(mergeSym(sym1, sym2)) 2969 v.AddArg3(ptr, val, mem) 2970 return true 2971 } 2972 return false 2973 } 2974 func rewriteValueMIPS_OpMIPSMOVFload(v *Value) bool { 2975 v_1 := v.Args[1] 2976 v_0 := v.Args[0] 2977 // match: (MOVFload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _)) 2978 // result: (MOVWgpfp val) 2979 for { 2980 off := auxIntToInt32(v.AuxInt) 2981 sym := auxToSym(v.Aux) 2982 ptr := v_0 2983 if v_1.Op != OpMIPSMOVWstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym { 2984 break 2985 } 2986 val := v_1.Args[1] 2987 if ptr != v_1.Args[0] { 2988 break 2989 } 2990 v.reset(OpMIPSMOVWgpfp) 2991 v.AddArg(val) 2992 return true 2993 } 2994 // match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem) 2995 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 2996 // result: (MOVFload [off1+off2] {sym} ptr mem) 2997 for { 2998 off1 := auxIntToInt32(v.AuxInt) 2999 sym := auxToSym(v.Aux) 3000 x := v_0 3001 if x.Op != OpMIPSADDconst { 3002 break 3003 } 3004 off2 := auxIntToInt32(x.AuxInt) 3005 ptr := x.Args[0] 3006 mem := v_1 3007 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3008 break 3009 } 3010 v.reset(OpMIPSMOVFload) 3011 v.AuxInt = int32ToAuxInt(off1 + off2) 3012 v.Aux = symToAux(sym) 3013 v.AddArg2(ptr, mem) 3014 return true 3015 } 3016 // match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3017 // cond: canMergeSym(sym1,sym2) 3018 // result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3019 for { 3020 off1 := auxIntToInt32(v.AuxInt) 3021 sym1 := auxToSym(v.Aux) 3022 if v_0.Op != OpMIPSMOVWaddr { 3023 break 3024 } 3025 off2 := auxIntToInt32(v_0.AuxInt) 3026 sym2 := auxToSym(v_0.Aux) 3027 ptr := v_0.Args[0] 3028 mem := v_1 3029 if !(canMergeSym(sym1, sym2)) { 3030 break 3031 } 3032 v.reset(OpMIPSMOVFload) 3033 v.AuxInt = int32ToAuxInt(off1 + off2) 3034 v.Aux = symToAux(mergeSym(sym1, sym2)) 3035 v.AddArg2(ptr, mem) 3036 return true 3037 } 3038 // match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _)) 3039 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3040 // result: x 3041 for { 3042 off := auxIntToInt32(v.AuxInt) 3043 sym := auxToSym(v.Aux) 3044 ptr := v_0 3045 if v_1.Op != OpMIPSMOVFstore { 3046 break 3047 } 3048 off2 := auxIntToInt32(v_1.AuxInt) 3049 sym2 := auxToSym(v_1.Aux) 3050 x := v_1.Args[1] 3051 ptr2 := v_1.Args[0] 3052 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3053 break 3054 } 3055 v.copyOf(x) 3056 return true 3057 } 3058 return false 3059 } 3060 func rewriteValueMIPS_OpMIPSMOVFstore(v *Value) bool { 3061 v_2 := v.Args[2] 3062 v_1 := v.Args[1] 3063 v_0 := v.Args[0] 3064 // match: (MOVFstore [off] {sym} ptr (MOVWgpfp val) mem) 3065 // result: (MOVWstore [off] {sym} ptr val mem) 3066 for { 3067 off := auxIntToInt32(v.AuxInt) 3068 sym := auxToSym(v.Aux) 3069 ptr := v_0 3070 if v_1.Op != OpMIPSMOVWgpfp { 3071 break 3072 } 3073 val 
:= v_1.Args[0] 3074 mem := v_2 3075 v.reset(OpMIPSMOVWstore) 3076 v.AuxInt = int32ToAuxInt(off) 3077 v.Aux = symToAux(sym) 3078 v.AddArg3(ptr, val, mem) 3079 return true 3080 } 3081 // match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 3082 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3083 // result: (MOVFstore [off1+off2] {sym} ptr val mem) 3084 for { 3085 off1 := auxIntToInt32(v.AuxInt) 3086 sym := auxToSym(v.Aux) 3087 x := v_0 3088 if x.Op != OpMIPSADDconst { 3089 break 3090 } 3091 off2 := auxIntToInt32(x.AuxInt) 3092 ptr := x.Args[0] 3093 val := v_1 3094 mem := v_2 3095 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3096 break 3097 } 3098 v.reset(OpMIPSMOVFstore) 3099 v.AuxInt = int32ToAuxInt(off1 + off2) 3100 v.Aux = symToAux(sym) 3101 v.AddArg3(ptr, val, mem) 3102 return true 3103 } 3104 // match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 3105 // cond: canMergeSym(sym1,sym2) 3106 // result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3107 for { 3108 off1 := auxIntToInt32(v.AuxInt) 3109 sym1 := auxToSym(v.Aux) 3110 if v_0.Op != OpMIPSMOVWaddr { 3111 break 3112 } 3113 off2 := auxIntToInt32(v_0.AuxInt) 3114 sym2 := auxToSym(v_0.Aux) 3115 ptr := v_0.Args[0] 3116 val := v_1 3117 mem := v_2 3118 if !(canMergeSym(sym1, sym2)) { 3119 break 3120 } 3121 v.reset(OpMIPSMOVFstore) 3122 v.AuxInt = int32ToAuxInt(off1 + off2) 3123 v.Aux = symToAux(mergeSym(sym1, sym2)) 3124 v.AddArg3(ptr, val, mem) 3125 return true 3126 } 3127 return false 3128 } 3129 func rewriteValueMIPS_OpMIPSMOVHUload(v *Value) bool { 3130 v_1 := v.Args[1] 3131 v_0 := v.Args[0] 3132 // match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3133 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3134 // result: (MOVHUload [off1+off2] {sym} ptr mem) 3135 for { 3136 off1 := auxIntToInt32(v.AuxInt) 3137 sym := auxToSym(v.Aux) 3138 x := v_0 3139 if x.Op != OpMIPSADDconst { 3140 break 3141 } 3142 off2 := auxIntToInt32(x.AuxInt) 3143 ptr := x.Args[0] 3144 mem := v_1 3145 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3146 break 3147 } 3148 v.reset(OpMIPSMOVHUload) 3149 v.AuxInt = int32ToAuxInt(off1 + off2) 3150 v.Aux = symToAux(sym) 3151 v.AddArg2(ptr, mem) 3152 return true 3153 } 3154 // match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3155 // cond: canMergeSym(sym1,sym2) 3156 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3157 for { 3158 off1 := auxIntToInt32(v.AuxInt) 3159 sym1 := auxToSym(v.Aux) 3160 if v_0.Op != OpMIPSMOVWaddr { 3161 break 3162 } 3163 off2 := auxIntToInt32(v_0.AuxInt) 3164 sym2 := auxToSym(v_0.Aux) 3165 ptr := v_0.Args[0] 3166 mem := v_1 3167 if !(canMergeSym(sym1, sym2)) { 3168 break 3169 } 3170 v.reset(OpMIPSMOVHUload) 3171 v.AuxInt = int32ToAuxInt(off1 + off2) 3172 v.Aux = symToAux(mergeSym(sym1, sym2)) 3173 v.AddArg2(ptr, mem) 3174 return true 3175 } 3176 // match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _)) 3177 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3178 // result: (MOVHUreg x) 3179 for { 3180 off := auxIntToInt32(v.AuxInt) 3181 sym := auxToSym(v.Aux) 3182 ptr := v_0 3183 if v_1.Op != OpMIPSMOVHstore { 3184 break 3185 } 3186 off2 := auxIntToInt32(v_1.AuxInt) 3187 sym2 := auxToSym(v_1.Aux) 3188 x := v_1.Args[1] 3189 ptr2 := v_1.Args[0] 3190 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3191 break 3192 } 3193 v.reset(OpMIPSMOVHUreg) 3194 v.AddArg(x) 3195 return true 3196 } 3197 return false 3198 } 3199 func rewriteValueMIPS_OpMIPSMOVHUreg(v *Value) bool { 3200 
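	// The MOVHUreg rules that follow eliminate redundant zero-extensions: values
	// produced by unsigned narrow loads or by earlier MOVBUreg/MOVHUreg ops are
	// already zero-extended, so the op collapses to MOVWreg; a single-use signed
	// MOVHload is re-issued as a MOVHUload in the load's own block (the @x.Block
	// form), and ANDconst/MOVWconst operands are folded by masking to 16 bits.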
v_0 := v.Args[0] 3201 b := v.Block 3202 // match: (MOVHUreg x:(MOVBUload _ _)) 3203 // result: (MOVWreg x) 3204 for { 3205 x := v_0 3206 if x.Op != OpMIPSMOVBUload { 3207 break 3208 } 3209 v.reset(OpMIPSMOVWreg) 3210 v.AddArg(x) 3211 return true 3212 } 3213 // match: (MOVHUreg x:(MOVHUload _ _)) 3214 // result: (MOVWreg x) 3215 for { 3216 x := v_0 3217 if x.Op != OpMIPSMOVHUload { 3218 break 3219 } 3220 v.reset(OpMIPSMOVWreg) 3221 v.AddArg(x) 3222 return true 3223 } 3224 // match: (MOVHUreg x:(MOVBUreg _)) 3225 // result: (MOVWreg x) 3226 for { 3227 x := v_0 3228 if x.Op != OpMIPSMOVBUreg { 3229 break 3230 } 3231 v.reset(OpMIPSMOVWreg) 3232 v.AddArg(x) 3233 return true 3234 } 3235 // match: (MOVHUreg x:(MOVHUreg _)) 3236 // result: (MOVWreg x) 3237 for { 3238 x := v_0 3239 if x.Op != OpMIPSMOVHUreg { 3240 break 3241 } 3242 v.reset(OpMIPSMOVWreg) 3243 v.AddArg(x) 3244 return true 3245 } 3246 // match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem)) 3247 // cond: x.Uses == 1 && clobber(x) 3248 // result: @x.Block (MOVHUload <t> [off] {sym} ptr mem) 3249 for { 3250 t := v.Type 3251 x := v_0 3252 if x.Op != OpMIPSMOVHload { 3253 break 3254 } 3255 off := auxIntToInt32(x.AuxInt) 3256 sym := auxToSym(x.Aux) 3257 mem := x.Args[1] 3258 ptr := x.Args[0] 3259 if !(x.Uses == 1 && clobber(x)) { 3260 break 3261 } 3262 b = x.Block 3263 v0 := b.NewValue0(x.Pos, OpMIPSMOVHUload, t) 3264 v.copyOf(v0) 3265 v0.AuxInt = int32ToAuxInt(off) 3266 v0.Aux = symToAux(sym) 3267 v0.AddArg2(ptr, mem) 3268 return true 3269 } 3270 // match: (MOVHUreg (ANDconst [c] x)) 3271 // result: (ANDconst [c&0xffff] x) 3272 for { 3273 if v_0.Op != OpMIPSANDconst { 3274 break 3275 } 3276 c := auxIntToInt32(v_0.AuxInt) 3277 x := v_0.Args[0] 3278 v.reset(OpMIPSANDconst) 3279 v.AuxInt = int32ToAuxInt(c & 0xffff) 3280 v.AddArg(x) 3281 return true 3282 } 3283 // match: (MOVHUreg (MOVWconst [c])) 3284 // result: (MOVWconst [int32(uint16(c))]) 3285 for { 3286 if v_0.Op != OpMIPSMOVWconst { 3287 break 3288 } 3289 c := auxIntToInt32(v_0.AuxInt) 3290 v.reset(OpMIPSMOVWconst) 3291 v.AuxInt = int32ToAuxInt(int32(uint16(c))) 3292 return true 3293 } 3294 return false 3295 } 3296 func rewriteValueMIPS_OpMIPSMOVHload(v *Value) bool { 3297 v_1 := v.Args[1] 3298 v_0 := v.Args[0] 3299 // match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3300 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3301 // result: (MOVHload [off1+off2] {sym} ptr mem) 3302 for { 3303 off1 := auxIntToInt32(v.AuxInt) 3304 sym := auxToSym(v.Aux) 3305 x := v_0 3306 if x.Op != OpMIPSADDconst { 3307 break 3308 } 3309 off2 := auxIntToInt32(x.AuxInt) 3310 ptr := x.Args[0] 3311 mem := v_1 3312 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3313 break 3314 } 3315 v.reset(OpMIPSMOVHload) 3316 v.AuxInt = int32ToAuxInt(off1 + off2) 3317 v.Aux = symToAux(sym) 3318 v.AddArg2(ptr, mem) 3319 return true 3320 } 3321 // match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3322 // cond: canMergeSym(sym1,sym2) 3323 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3324 for { 3325 off1 := auxIntToInt32(v.AuxInt) 3326 sym1 := auxToSym(v.Aux) 3327 if v_0.Op != OpMIPSMOVWaddr { 3328 break 3329 } 3330 off2 := auxIntToInt32(v_0.AuxInt) 3331 sym2 := auxToSym(v_0.Aux) 3332 ptr := v_0.Args[0] 3333 mem := v_1 3334 if !(canMergeSym(sym1, sym2)) { 3335 break 3336 } 3337 v.reset(OpMIPSMOVHload) 3338 v.AuxInt = int32ToAuxInt(off1 + off2) 3339 v.Aux = symToAux(mergeSym(sym1, sym2)) 3340 v.AddArg2(ptr, mem) 3341 return true 3342 } 3343 // match: (MOVHload [off] {sym} ptr 
(MOVHstore [off2] {sym2} ptr2 x _)) 3344 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3345 // result: (MOVHreg x) 3346 for { 3347 off := auxIntToInt32(v.AuxInt) 3348 sym := auxToSym(v.Aux) 3349 ptr := v_0 3350 if v_1.Op != OpMIPSMOVHstore { 3351 break 3352 } 3353 off2 := auxIntToInt32(v_1.AuxInt) 3354 sym2 := auxToSym(v_1.Aux) 3355 x := v_1.Args[1] 3356 ptr2 := v_1.Args[0] 3357 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3358 break 3359 } 3360 v.reset(OpMIPSMOVHreg) 3361 v.AddArg(x) 3362 return true 3363 } 3364 return false 3365 } 3366 func rewriteValueMIPS_OpMIPSMOVHreg(v *Value) bool { 3367 v_0 := v.Args[0] 3368 b := v.Block 3369 // match: (MOVHreg x:(MOVBload _ _)) 3370 // result: (MOVWreg x) 3371 for { 3372 x := v_0 3373 if x.Op != OpMIPSMOVBload { 3374 break 3375 } 3376 v.reset(OpMIPSMOVWreg) 3377 v.AddArg(x) 3378 return true 3379 } 3380 // match: (MOVHreg x:(MOVBUload _ _)) 3381 // result: (MOVWreg x) 3382 for { 3383 x := v_0 3384 if x.Op != OpMIPSMOVBUload { 3385 break 3386 } 3387 v.reset(OpMIPSMOVWreg) 3388 v.AddArg(x) 3389 return true 3390 } 3391 // match: (MOVHreg x:(MOVHload _ _)) 3392 // result: (MOVWreg x) 3393 for { 3394 x := v_0 3395 if x.Op != OpMIPSMOVHload { 3396 break 3397 } 3398 v.reset(OpMIPSMOVWreg) 3399 v.AddArg(x) 3400 return true 3401 } 3402 // match: (MOVHreg x:(MOVBreg _)) 3403 // result: (MOVWreg x) 3404 for { 3405 x := v_0 3406 if x.Op != OpMIPSMOVBreg { 3407 break 3408 } 3409 v.reset(OpMIPSMOVWreg) 3410 v.AddArg(x) 3411 return true 3412 } 3413 // match: (MOVHreg x:(MOVBUreg _)) 3414 // result: (MOVWreg x) 3415 for { 3416 x := v_0 3417 if x.Op != OpMIPSMOVBUreg { 3418 break 3419 } 3420 v.reset(OpMIPSMOVWreg) 3421 v.AddArg(x) 3422 return true 3423 } 3424 // match: (MOVHreg x:(MOVHreg _)) 3425 // result: (MOVWreg x) 3426 for { 3427 x := v_0 3428 if x.Op != OpMIPSMOVHreg { 3429 break 3430 } 3431 v.reset(OpMIPSMOVWreg) 3432 v.AddArg(x) 3433 return true 3434 } 3435 // match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem)) 3436 // cond: x.Uses == 1 && clobber(x) 3437 // result: @x.Block (MOVHload <t> [off] {sym} ptr mem) 3438 for { 3439 t := v.Type 3440 x := v_0 3441 if x.Op != OpMIPSMOVHUload { 3442 break 3443 } 3444 off := auxIntToInt32(x.AuxInt) 3445 sym := auxToSym(x.Aux) 3446 mem := x.Args[1] 3447 ptr := x.Args[0] 3448 if !(x.Uses == 1 && clobber(x)) { 3449 break 3450 } 3451 b = x.Block 3452 v0 := b.NewValue0(x.Pos, OpMIPSMOVHload, t) 3453 v.copyOf(v0) 3454 v0.AuxInt = int32ToAuxInt(off) 3455 v0.Aux = symToAux(sym) 3456 v0.AddArg2(ptr, mem) 3457 return true 3458 } 3459 // match: (MOVHreg (ANDconst [c] x)) 3460 // cond: c & 0x8000 == 0 3461 // result: (ANDconst [c&0x7fff] x) 3462 for { 3463 if v_0.Op != OpMIPSANDconst { 3464 break 3465 } 3466 c := auxIntToInt32(v_0.AuxInt) 3467 x := v_0.Args[0] 3468 if !(c&0x8000 == 0) { 3469 break 3470 } 3471 v.reset(OpMIPSANDconst) 3472 v.AuxInt = int32ToAuxInt(c & 0x7fff) 3473 v.AddArg(x) 3474 return true 3475 } 3476 // match: (MOVHreg (MOVWconst [c])) 3477 // result: (MOVWconst [int32(int16(c))]) 3478 for { 3479 if v_0.Op != OpMIPSMOVWconst { 3480 break 3481 } 3482 c := auxIntToInt32(v_0.AuxInt) 3483 v.reset(OpMIPSMOVWconst) 3484 v.AuxInt = int32ToAuxInt(int32(int16(c))) 3485 return true 3486 } 3487 return false 3488 } 3489 func rewriteValueMIPS_OpMIPSMOVHstore(v *Value) bool { 3490 v_2 := v.Args[2] 3491 v_1 := v.Args[1] 3492 v_0 := v.Args[0] 3493 // match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 3494 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3495 // result: (MOVHstore 
[off1+off2] {sym} ptr val mem) 3496 for { 3497 off1 := auxIntToInt32(v.AuxInt) 3498 sym := auxToSym(v.Aux) 3499 x := v_0 3500 if x.Op != OpMIPSADDconst { 3501 break 3502 } 3503 off2 := auxIntToInt32(x.AuxInt) 3504 ptr := x.Args[0] 3505 val := v_1 3506 mem := v_2 3507 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3508 break 3509 } 3510 v.reset(OpMIPSMOVHstore) 3511 v.AuxInt = int32ToAuxInt(off1 + off2) 3512 v.Aux = symToAux(sym) 3513 v.AddArg3(ptr, val, mem) 3514 return true 3515 } 3516 // match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 3517 // cond: canMergeSym(sym1,sym2) 3518 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3519 for { 3520 off1 := auxIntToInt32(v.AuxInt) 3521 sym1 := auxToSym(v.Aux) 3522 if v_0.Op != OpMIPSMOVWaddr { 3523 break 3524 } 3525 off2 := auxIntToInt32(v_0.AuxInt) 3526 sym2 := auxToSym(v_0.Aux) 3527 ptr := v_0.Args[0] 3528 val := v_1 3529 mem := v_2 3530 if !(canMergeSym(sym1, sym2)) { 3531 break 3532 } 3533 v.reset(OpMIPSMOVHstore) 3534 v.AuxInt = int32ToAuxInt(off1 + off2) 3535 v.Aux = symToAux(mergeSym(sym1, sym2)) 3536 v.AddArg3(ptr, val, mem) 3537 return true 3538 } 3539 // match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem) 3540 // result: (MOVHstorezero [off] {sym} ptr mem) 3541 for { 3542 off := auxIntToInt32(v.AuxInt) 3543 sym := auxToSym(v.Aux) 3544 ptr := v_0 3545 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 3546 break 3547 } 3548 mem := v_2 3549 v.reset(OpMIPSMOVHstorezero) 3550 v.AuxInt = int32ToAuxInt(off) 3551 v.Aux = symToAux(sym) 3552 v.AddArg2(ptr, mem) 3553 return true 3554 } 3555 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 3556 // result: (MOVHstore [off] {sym} ptr x mem) 3557 for { 3558 off := auxIntToInt32(v.AuxInt) 3559 sym := auxToSym(v.Aux) 3560 ptr := v_0 3561 if v_1.Op != OpMIPSMOVHreg { 3562 break 3563 } 3564 x := v_1.Args[0] 3565 mem := v_2 3566 v.reset(OpMIPSMOVHstore) 3567 v.AuxInt = int32ToAuxInt(off) 3568 v.Aux = symToAux(sym) 3569 v.AddArg3(ptr, x, mem) 3570 return true 3571 } 3572 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 3573 // result: (MOVHstore [off] {sym} ptr x mem) 3574 for { 3575 off := auxIntToInt32(v.AuxInt) 3576 sym := auxToSym(v.Aux) 3577 ptr := v_0 3578 if v_1.Op != OpMIPSMOVHUreg { 3579 break 3580 } 3581 x := v_1.Args[0] 3582 mem := v_2 3583 v.reset(OpMIPSMOVHstore) 3584 v.AuxInt = int32ToAuxInt(off) 3585 v.Aux = symToAux(sym) 3586 v.AddArg3(ptr, x, mem) 3587 return true 3588 } 3589 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 3590 // result: (MOVHstore [off] {sym} ptr x mem) 3591 for { 3592 off := auxIntToInt32(v.AuxInt) 3593 sym := auxToSym(v.Aux) 3594 ptr := v_0 3595 if v_1.Op != OpMIPSMOVWreg { 3596 break 3597 } 3598 x := v_1.Args[0] 3599 mem := v_2 3600 v.reset(OpMIPSMOVHstore) 3601 v.AuxInt = int32ToAuxInt(off) 3602 v.Aux = symToAux(sym) 3603 v.AddArg3(ptr, x, mem) 3604 return true 3605 } 3606 return false 3607 } 3608 func rewriteValueMIPS_OpMIPSMOVHstorezero(v *Value) bool { 3609 v_1 := v.Args[1] 3610 v_0 := v.Args[0] 3611 // match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem) 3612 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3613 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 3614 for { 3615 off1 := auxIntToInt32(v.AuxInt) 3616 sym := auxToSym(v.Aux) 3617 x := v_0 3618 if x.Op != OpMIPSADDconst { 3619 break 3620 } 3621 off2 := auxIntToInt32(x.AuxInt) 3622 ptr := x.Args[0] 3623 mem := v_1 3624 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3625 break 3626 } 3627 
v.reset(OpMIPSMOVHstorezero) 3628 v.AuxInt = int32ToAuxInt(off1 + off2) 3629 v.Aux = symToAux(sym) 3630 v.AddArg2(ptr, mem) 3631 return true 3632 } 3633 // match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3634 // cond: canMergeSym(sym1,sym2) 3635 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3636 for { 3637 off1 := auxIntToInt32(v.AuxInt) 3638 sym1 := auxToSym(v.Aux) 3639 if v_0.Op != OpMIPSMOVWaddr { 3640 break 3641 } 3642 off2 := auxIntToInt32(v_0.AuxInt) 3643 sym2 := auxToSym(v_0.Aux) 3644 ptr := v_0.Args[0] 3645 mem := v_1 3646 if !(canMergeSym(sym1, sym2)) { 3647 break 3648 } 3649 v.reset(OpMIPSMOVHstorezero) 3650 v.AuxInt = int32ToAuxInt(off1 + off2) 3651 v.Aux = symToAux(mergeSym(sym1, sym2)) 3652 v.AddArg2(ptr, mem) 3653 return true 3654 } 3655 return false 3656 } 3657 func rewriteValueMIPS_OpMIPSMOVWload(v *Value) bool { 3658 v_1 := v.Args[1] 3659 v_0 := v.Args[0] 3660 // match: (MOVWload [off] {sym} ptr (MOVFstore [off] {sym} ptr val _)) 3661 // result: (MOVWfpgp val) 3662 for { 3663 off := auxIntToInt32(v.AuxInt) 3664 sym := auxToSym(v.Aux) 3665 ptr := v_0 3666 if v_1.Op != OpMIPSMOVFstore || auxIntToInt32(v_1.AuxInt) != off || auxToSym(v_1.Aux) != sym { 3667 break 3668 } 3669 val := v_1.Args[1] 3670 if ptr != v_1.Args[0] { 3671 break 3672 } 3673 v.reset(OpMIPSMOVWfpgp) 3674 v.AddArg(val) 3675 return true 3676 } 3677 // match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3678 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3679 // result: (MOVWload [off1+off2] {sym} ptr mem) 3680 for { 3681 off1 := auxIntToInt32(v.AuxInt) 3682 sym := auxToSym(v.Aux) 3683 x := v_0 3684 if x.Op != OpMIPSADDconst { 3685 break 3686 } 3687 off2 := auxIntToInt32(x.AuxInt) 3688 ptr := x.Args[0] 3689 mem := v_1 3690 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3691 break 3692 } 3693 v.reset(OpMIPSMOVWload) 3694 v.AuxInt = int32ToAuxInt(off1 + off2) 3695 v.Aux = symToAux(sym) 3696 v.AddArg2(ptr, mem) 3697 return true 3698 } 3699 // match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3700 // cond: canMergeSym(sym1,sym2) 3701 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3702 for { 3703 off1 := auxIntToInt32(v.AuxInt) 3704 sym1 := auxToSym(v.Aux) 3705 if v_0.Op != OpMIPSMOVWaddr { 3706 break 3707 } 3708 off2 := auxIntToInt32(v_0.AuxInt) 3709 sym2 := auxToSym(v_0.Aux) 3710 ptr := v_0.Args[0] 3711 mem := v_1 3712 if !(canMergeSym(sym1, sym2)) { 3713 break 3714 } 3715 v.reset(OpMIPSMOVWload) 3716 v.AuxInt = int32ToAuxInt(off1 + off2) 3717 v.Aux = symToAux(mergeSym(sym1, sym2)) 3718 v.AddArg2(ptr, mem) 3719 return true 3720 } 3721 // match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) 3722 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3723 // result: x 3724 for { 3725 off := auxIntToInt32(v.AuxInt) 3726 sym := auxToSym(v.Aux) 3727 ptr := v_0 3728 if v_1.Op != OpMIPSMOVWstore { 3729 break 3730 } 3731 off2 := auxIntToInt32(v_1.AuxInt) 3732 sym2 := auxToSym(v_1.Aux) 3733 x := v_1.Args[1] 3734 ptr2 := v_1.Args[0] 3735 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3736 break 3737 } 3738 v.copyOf(x) 3739 return true 3740 } 3741 return false 3742 } 3743 func rewriteValueMIPS_OpMIPSMOVWnop(v *Value) bool { 3744 v_0 := v.Args[0] 3745 // match: (MOVWnop (MOVWconst [c])) 3746 // result: (MOVWconst [c]) 3747 for { 3748 if v_0.Op != OpMIPSMOVWconst { 3749 break 3750 } 3751 c := auxIntToInt32(v_0.AuxInt) 3752 v.reset(OpMIPSMOVWconst) 3753 v.AuxInt = int32ToAuxInt(c) 3754 return true 3755 } 
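	// Note on the control flow used throughout this file: each clause returns
	// true only once it has rewritten v (via v.reset or v.copyOf), a "break"
	// abandons the current clause, and the trailing "return false" reports that
	// no rule applied, so the enclosing rewrite pass can keep reapplying these
	// functions until a whole pass makes no further changes.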
3756 return false 3757 } 3758 func rewriteValueMIPS_OpMIPSMOVWreg(v *Value) bool { 3759 v_0 := v.Args[0] 3760 // match: (MOVWreg x) 3761 // cond: x.Uses == 1 3762 // result: (MOVWnop x) 3763 for { 3764 x := v_0 3765 if !(x.Uses == 1) { 3766 break 3767 } 3768 v.reset(OpMIPSMOVWnop) 3769 v.AddArg(x) 3770 return true 3771 } 3772 // match: (MOVWreg (MOVWconst [c])) 3773 // result: (MOVWconst [c]) 3774 for { 3775 if v_0.Op != OpMIPSMOVWconst { 3776 break 3777 } 3778 c := auxIntToInt32(v_0.AuxInt) 3779 v.reset(OpMIPSMOVWconst) 3780 v.AuxInt = int32ToAuxInt(c) 3781 return true 3782 } 3783 return false 3784 } 3785 func rewriteValueMIPS_OpMIPSMOVWstore(v *Value) bool { 3786 v_2 := v.Args[2] 3787 v_1 := v.Args[1] 3788 v_0 := v.Args[0] 3789 // match: (MOVWstore [off] {sym} ptr (MOVWfpgp val) mem) 3790 // result: (MOVFstore [off] {sym} ptr val mem) 3791 for { 3792 off := auxIntToInt32(v.AuxInt) 3793 sym := auxToSym(v.Aux) 3794 ptr := v_0 3795 if v_1.Op != OpMIPSMOVWfpgp { 3796 break 3797 } 3798 val := v_1.Args[0] 3799 mem := v_2 3800 v.reset(OpMIPSMOVFstore) 3801 v.AuxInt = int32ToAuxInt(off) 3802 v.Aux = symToAux(sym) 3803 v.AddArg3(ptr, val, mem) 3804 return true 3805 } 3806 // match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 3807 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3808 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 3809 for { 3810 off1 := auxIntToInt32(v.AuxInt) 3811 sym := auxToSym(v.Aux) 3812 x := v_0 3813 if x.Op != OpMIPSADDconst { 3814 break 3815 } 3816 off2 := auxIntToInt32(x.AuxInt) 3817 ptr := x.Args[0] 3818 val := v_1 3819 mem := v_2 3820 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3821 break 3822 } 3823 v.reset(OpMIPSMOVWstore) 3824 v.AuxInt = int32ToAuxInt(off1 + off2) 3825 v.Aux = symToAux(sym) 3826 v.AddArg3(ptr, val, mem) 3827 return true 3828 } 3829 // match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 3830 // cond: canMergeSym(sym1,sym2) 3831 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3832 for { 3833 off1 := auxIntToInt32(v.AuxInt) 3834 sym1 := auxToSym(v.Aux) 3835 if v_0.Op != OpMIPSMOVWaddr { 3836 break 3837 } 3838 off2 := auxIntToInt32(v_0.AuxInt) 3839 sym2 := auxToSym(v_0.Aux) 3840 ptr := v_0.Args[0] 3841 val := v_1 3842 mem := v_2 3843 if !(canMergeSym(sym1, sym2)) { 3844 break 3845 } 3846 v.reset(OpMIPSMOVWstore) 3847 v.AuxInt = int32ToAuxInt(off1 + off2) 3848 v.Aux = symToAux(mergeSym(sym1, sym2)) 3849 v.AddArg3(ptr, val, mem) 3850 return true 3851 } 3852 // match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem) 3853 // result: (MOVWstorezero [off] {sym} ptr mem) 3854 for { 3855 off := auxIntToInt32(v.AuxInt) 3856 sym := auxToSym(v.Aux) 3857 ptr := v_0 3858 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 3859 break 3860 } 3861 mem := v_2 3862 v.reset(OpMIPSMOVWstorezero) 3863 v.AuxInt = int32ToAuxInt(off) 3864 v.Aux = symToAux(sym) 3865 v.AddArg2(ptr, mem) 3866 return true 3867 } 3868 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 3869 // result: (MOVWstore [off] {sym} ptr x mem) 3870 for { 3871 off := auxIntToInt32(v.AuxInt) 3872 sym := auxToSym(v.Aux) 3873 ptr := v_0 3874 if v_1.Op != OpMIPSMOVWreg { 3875 break 3876 } 3877 x := v_1.Args[0] 3878 mem := v_2 3879 v.reset(OpMIPSMOVWstore) 3880 v.AuxInt = int32ToAuxInt(off) 3881 v.Aux = symToAux(sym) 3882 v.AddArg3(ptr, x, mem) 3883 return true 3884 } 3885 return false 3886 } 3887 func rewriteValueMIPS_OpMIPSMOVWstorezero(v *Value) bool { 3888 v_1 := v.Args[1] 3889 v_0 := v.Args[0] 3890 // match: (MOVWstorezero [off1] 
{sym} x:(ADDconst [off2] ptr) mem) 3891 // cond: (is16Bit(int64(off1+off2)) || x.Uses == 1) 3892 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 3893 for { 3894 off1 := auxIntToInt32(v.AuxInt) 3895 sym := auxToSym(v.Aux) 3896 x := v_0 3897 if x.Op != OpMIPSADDconst { 3898 break 3899 } 3900 off2 := auxIntToInt32(x.AuxInt) 3901 ptr := x.Args[0] 3902 mem := v_1 3903 if !(is16Bit(int64(off1+off2)) || x.Uses == 1) { 3904 break 3905 } 3906 v.reset(OpMIPSMOVWstorezero) 3907 v.AuxInt = int32ToAuxInt(off1 + off2) 3908 v.Aux = symToAux(sym) 3909 v.AddArg2(ptr, mem) 3910 return true 3911 } 3912 // match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3913 // cond: canMergeSym(sym1,sym2) 3914 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3915 for { 3916 off1 := auxIntToInt32(v.AuxInt) 3917 sym1 := auxToSym(v.Aux) 3918 if v_0.Op != OpMIPSMOVWaddr { 3919 break 3920 } 3921 off2 := auxIntToInt32(v_0.AuxInt) 3922 sym2 := auxToSym(v_0.Aux) 3923 ptr := v_0.Args[0] 3924 mem := v_1 3925 if !(canMergeSym(sym1, sym2)) { 3926 break 3927 } 3928 v.reset(OpMIPSMOVWstorezero) 3929 v.AuxInt = int32ToAuxInt(off1 + off2) 3930 v.Aux = symToAux(mergeSym(sym1, sym2)) 3931 v.AddArg2(ptr, mem) 3932 return true 3933 } 3934 return false 3935 } 3936 func rewriteValueMIPS_OpMIPSMUL(v *Value) bool { 3937 v_1 := v.Args[1] 3938 v_0 := v.Args[0] 3939 // match: (MUL (MOVWconst [0]) _ ) 3940 // result: (MOVWconst [0]) 3941 for { 3942 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3943 if v_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0.AuxInt) != 0 { 3944 continue 3945 } 3946 v.reset(OpMIPSMOVWconst) 3947 v.AuxInt = int32ToAuxInt(0) 3948 return true 3949 } 3950 break 3951 } 3952 // match: (MUL (MOVWconst [1]) x ) 3953 // result: x 3954 for { 3955 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3956 if v_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0.AuxInt) != 1 { 3957 continue 3958 } 3959 x := v_1 3960 v.copyOf(x) 3961 return true 3962 } 3963 break 3964 } 3965 // match: (MUL (MOVWconst [-1]) x ) 3966 // result: (NEG x) 3967 for { 3968 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3969 if v_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0.AuxInt) != -1 { 3970 continue 3971 } 3972 x := v_1 3973 v.reset(OpMIPSNEG) 3974 v.AddArg(x) 3975 return true 3976 } 3977 break 3978 } 3979 // match: (MUL (MOVWconst [c]) x ) 3980 // cond: isPowerOfTwo64(int64(uint32(c))) 3981 // result: (SLLconst [int32(log2uint32(int64(c)))] x) 3982 for { 3983 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3984 if v_0.Op != OpMIPSMOVWconst { 3985 continue 3986 } 3987 c := auxIntToInt32(v_0.AuxInt) 3988 x := v_1 3989 if !(isPowerOfTwo64(int64(uint32(c)))) { 3990 continue 3991 } 3992 v.reset(OpMIPSSLLconst) 3993 v.AuxInt = int32ToAuxInt(int32(log2uint32(int64(c)))) 3994 v.AddArg(x) 3995 return true 3996 } 3997 break 3998 } 3999 // match: (MUL (MOVWconst [c]) (MOVWconst [d])) 4000 // result: (MOVWconst [c*d]) 4001 for { 4002 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4003 if v_0.Op != OpMIPSMOVWconst { 4004 continue 4005 } 4006 c := auxIntToInt32(v_0.AuxInt) 4007 if v_1.Op != OpMIPSMOVWconst { 4008 continue 4009 } 4010 d := auxIntToInt32(v_1.AuxInt) 4011 v.reset(OpMIPSMOVWconst) 4012 v.AuxInt = int32ToAuxInt(c * d) 4013 return true 4014 } 4015 break 4016 } 4017 return false 4018 } 4019 func rewriteValueMIPS_OpMIPSNEG(v *Value) bool { 4020 v_0 := v.Args[0] 4021 // match: (NEG (MOVWconst [c])) 4022 // result: (MOVWconst [-c]) 4023 for { 4024 if v_0.Op != OpMIPSMOVWconst { 4025 
break 4026 } 4027 c := auxIntToInt32(v_0.AuxInt) 4028 v.reset(OpMIPSMOVWconst) 4029 v.AuxInt = int32ToAuxInt(-c) 4030 return true 4031 } 4032 return false 4033 } 4034 func rewriteValueMIPS_OpMIPSNOR(v *Value) bool { 4035 v_1 := v.Args[1] 4036 v_0 := v.Args[0] 4037 // match: (NOR x (MOVWconst [c])) 4038 // result: (NORconst [c] x) 4039 for { 4040 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4041 x := v_0 4042 if v_1.Op != OpMIPSMOVWconst { 4043 continue 4044 } 4045 c := auxIntToInt32(v_1.AuxInt) 4046 v.reset(OpMIPSNORconst) 4047 v.AuxInt = int32ToAuxInt(c) 4048 v.AddArg(x) 4049 return true 4050 } 4051 break 4052 } 4053 return false 4054 } 4055 func rewriteValueMIPS_OpMIPSNORconst(v *Value) bool { 4056 v_0 := v.Args[0] 4057 // match: (NORconst [c] (MOVWconst [d])) 4058 // result: (MOVWconst [^(c|d)]) 4059 for { 4060 c := auxIntToInt32(v.AuxInt) 4061 if v_0.Op != OpMIPSMOVWconst { 4062 break 4063 } 4064 d := auxIntToInt32(v_0.AuxInt) 4065 v.reset(OpMIPSMOVWconst) 4066 v.AuxInt = int32ToAuxInt(^(c | d)) 4067 return true 4068 } 4069 return false 4070 } 4071 func rewriteValueMIPS_OpMIPSOR(v *Value) bool { 4072 v_1 := v.Args[1] 4073 v_0 := v.Args[0] 4074 b := v.Block 4075 // match: (OR x (MOVWconst [c])) 4076 // result: (ORconst [c] x) 4077 for { 4078 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4079 x := v_0 4080 if v_1.Op != OpMIPSMOVWconst { 4081 continue 4082 } 4083 c := auxIntToInt32(v_1.AuxInt) 4084 v.reset(OpMIPSORconst) 4085 v.AuxInt = int32ToAuxInt(c) 4086 v.AddArg(x) 4087 return true 4088 } 4089 break 4090 } 4091 // match: (OR x x) 4092 // result: x 4093 for { 4094 x := v_0 4095 if x != v_1 { 4096 break 4097 } 4098 v.copyOf(x) 4099 return true 4100 } 4101 // match: (OR (SGTUzero x) (SGTUzero y)) 4102 // result: (SGTUzero (OR <x.Type> x y)) 4103 for { 4104 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4105 if v_0.Op != OpMIPSSGTUzero { 4106 continue 4107 } 4108 x := v_0.Args[0] 4109 if v_1.Op != OpMIPSSGTUzero { 4110 continue 4111 } 4112 y := v_1.Args[0] 4113 v.reset(OpMIPSSGTUzero) 4114 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 4115 v0.AddArg2(x, y) 4116 v.AddArg(v0) 4117 return true 4118 } 4119 break 4120 } 4121 return false 4122 } 4123 func rewriteValueMIPS_OpMIPSORconst(v *Value) bool { 4124 v_0 := v.Args[0] 4125 // match: (ORconst [0] x) 4126 // result: x 4127 for { 4128 if auxIntToInt32(v.AuxInt) != 0 { 4129 break 4130 } 4131 x := v_0 4132 v.copyOf(x) 4133 return true 4134 } 4135 // match: (ORconst [-1] _) 4136 // result: (MOVWconst [-1]) 4137 for { 4138 if auxIntToInt32(v.AuxInt) != -1 { 4139 break 4140 } 4141 v.reset(OpMIPSMOVWconst) 4142 v.AuxInt = int32ToAuxInt(-1) 4143 return true 4144 } 4145 // match: (ORconst [c] (MOVWconst [d])) 4146 // result: (MOVWconst [c|d]) 4147 for { 4148 c := auxIntToInt32(v.AuxInt) 4149 if v_0.Op != OpMIPSMOVWconst { 4150 break 4151 } 4152 d := auxIntToInt32(v_0.AuxInt) 4153 v.reset(OpMIPSMOVWconst) 4154 v.AuxInt = int32ToAuxInt(c | d) 4155 return true 4156 } 4157 // match: (ORconst [c] (ORconst [d] x)) 4158 // result: (ORconst [c|d] x) 4159 for { 4160 c := auxIntToInt32(v.AuxInt) 4161 if v_0.Op != OpMIPSORconst { 4162 break 4163 } 4164 d := auxIntToInt32(v_0.AuxInt) 4165 x := v_0.Args[0] 4166 v.reset(OpMIPSORconst) 4167 v.AuxInt = int32ToAuxInt(c | d) 4168 v.AddArg(x) 4169 return true 4170 } 4171 return false 4172 } 4173 func rewriteValueMIPS_OpMIPSSGT(v *Value) bool { 4174 v_1 := v.Args[1] 4175 v_0 := v.Args[0] 4176 // match: (SGT (MOVWconst [c]) x) 4177 // result: (SGTconst [c] x) 4178 for { 4179 if v_0.Op 
!= OpMIPSMOVWconst { 4180 break 4181 } 4182 c := auxIntToInt32(v_0.AuxInt) 4183 x := v_1 4184 v.reset(OpMIPSSGTconst) 4185 v.AuxInt = int32ToAuxInt(c) 4186 v.AddArg(x) 4187 return true 4188 } 4189 // match: (SGT x (MOVWconst [0])) 4190 // result: (SGTzero x) 4191 for { 4192 x := v_0 4193 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 4194 break 4195 } 4196 v.reset(OpMIPSSGTzero) 4197 v.AddArg(x) 4198 return true 4199 } 4200 return false 4201 } 4202 func rewriteValueMIPS_OpMIPSSGTU(v *Value) bool { 4203 v_1 := v.Args[1] 4204 v_0 := v.Args[0] 4205 // match: (SGTU (MOVWconst [c]) x) 4206 // result: (SGTUconst [c] x) 4207 for { 4208 if v_0.Op != OpMIPSMOVWconst { 4209 break 4210 } 4211 c := auxIntToInt32(v_0.AuxInt) 4212 x := v_1 4213 v.reset(OpMIPSSGTUconst) 4214 v.AuxInt = int32ToAuxInt(c) 4215 v.AddArg(x) 4216 return true 4217 } 4218 // match: (SGTU x (MOVWconst [0])) 4219 // result: (SGTUzero x) 4220 for { 4221 x := v_0 4222 if v_1.Op != OpMIPSMOVWconst || auxIntToInt32(v_1.AuxInt) != 0 { 4223 break 4224 } 4225 v.reset(OpMIPSSGTUzero) 4226 v.AddArg(x) 4227 return true 4228 } 4229 return false 4230 } 4231 func rewriteValueMIPS_OpMIPSSGTUconst(v *Value) bool { 4232 v_0 := v.Args[0] 4233 // match: (SGTUconst [c] (MOVWconst [d])) 4234 // cond: uint32(c) > uint32(d) 4235 // result: (MOVWconst [1]) 4236 for { 4237 c := auxIntToInt32(v.AuxInt) 4238 if v_0.Op != OpMIPSMOVWconst { 4239 break 4240 } 4241 d := auxIntToInt32(v_0.AuxInt) 4242 if !(uint32(c) > uint32(d)) { 4243 break 4244 } 4245 v.reset(OpMIPSMOVWconst) 4246 v.AuxInt = int32ToAuxInt(1) 4247 return true 4248 } 4249 // match: (SGTUconst [c] (MOVWconst [d])) 4250 // cond: uint32(c) <= uint32(d) 4251 // result: (MOVWconst [0]) 4252 for { 4253 c := auxIntToInt32(v.AuxInt) 4254 if v_0.Op != OpMIPSMOVWconst { 4255 break 4256 } 4257 d := auxIntToInt32(v_0.AuxInt) 4258 if !(uint32(c) <= uint32(d)) { 4259 break 4260 } 4261 v.reset(OpMIPSMOVWconst) 4262 v.AuxInt = int32ToAuxInt(0) 4263 return true 4264 } 4265 // match: (SGTUconst [c] (MOVBUreg _)) 4266 // cond: 0xff < uint32(c) 4267 // result: (MOVWconst [1]) 4268 for { 4269 c := auxIntToInt32(v.AuxInt) 4270 if v_0.Op != OpMIPSMOVBUreg || !(0xff < uint32(c)) { 4271 break 4272 } 4273 v.reset(OpMIPSMOVWconst) 4274 v.AuxInt = int32ToAuxInt(1) 4275 return true 4276 } 4277 // match: (SGTUconst [c] (MOVHUreg _)) 4278 // cond: 0xffff < uint32(c) 4279 // result: (MOVWconst [1]) 4280 for { 4281 c := auxIntToInt32(v.AuxInt) 4282 if v_0.Op != OpMIPSMOVHUreg || !(0xffff < uint32(c)) { 4283 break 4284 } 4285 v.reset(OpMIPSMOVWconst) 4286 v.AuxInt = int32ToAuxInt(1) 4287 return true 4288 } 4289 // match: (SGTUconst [c] (ANDconst [m] _)) 4290 // cond: uint32(m) < uint32(c) 4291 // result: (MOVWconst [1]) 4292 for { 4293 c := auxIntToInt32(v.AuxInt) 4294 if v_0.Op != OpMIPSANDconst { 4295 break 4296 } 4297 m := auxIntToInt32(v_0.AuxInt) 4298 if !(uint32(m) < uint32(c)) { 4299 break 4300 } 4301 v.reset(OpMIPSMOVWconst) 4302 v.AuxInt = int32ToAuxInt(1) 4303 return true 4304 } 4305 // match: (SGTUconst [c] (SRLconst _ [d])) 4306 // cond: uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c) 4307 // result: (MOVWconst [1]) 4308 for { 4309 c := auxIntToInt32(v.AuxInt) 4310 if v_0.Op != OpMIPSSRLconst { 4311 break 4312 } 4313 d := auxIntToInt32(v_0.AuxInt) 4314 if !(uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) { 4315 break 4316 } 4317 v.reset(OpMIPSMOVWconst) 4318 v.AuxInt = int32ToAuxInt(1) 4319 return true 4320 } 4321 return false 4322 } 4323 func rewriteValueMIPS_OpMIPSSGTUzero(v *Value) 
bool { 4324 v_0 := v.Args[0] 4325 // match: (SGTUzero (MOVWconst [d])) 4326 // cond: d != 0 4327 // result: (MOVWconst [1]) 4328 for { 4329 if v_0.Op != OpMIPSMOVWconst { 4330 break 4331 } 4332 d := auxIntToInt32(v_0.AuxInt) 4333 if !(d != 0) { 4334 break 4335 } 4336 v.reset(OpMIPSMOVWconst) 4337 v.AuxInt = int32ToAuxInt(1) 4338 return true 4339 } 4340 // match: (SGTUzero (MOVWconst [d])) 4341 // cond: d == 0 4342 // result: (MOVWconst [0]) 4343 for { 4344 if v_0.Op != OpMIPSMOVWconst { 4345 break 4346 } 4347 d := auxIntToInt32(v_0.AuxInt) 4348 if !(d == 0) { 4349 break 4350 } 4351 v.reset(OpMIPSMOVWconst) 4352 v.AuxInt = int32ToAuxInt(0) 4353 return true 4354 } 4355 return false 4356 } 4357 func rewriteValueMIPS_OpMIPSSGTconst(v *Value) bool { 4358 v_0 := v.Args[0] 4359 // match: (SGTconst [c] (MOVWconst [d])) 4360 // cond: c > d 4361 // result: (MOVWconst [1]) 4362 for { 4363 c := auxIntToInt32(v.AuxInt) 4364 if v_0.Op != OpMIPSMOVWconst { 4365 break 4366 } 4367 d := auxIntToInt32(v_0.AuxInt) 4368 if !(c > d) { 4369 break 4370 } 4371 v.reset(OpMIPSMOVWconst) 4372 v.AuxInt = int32ToAuxInt(1) 4373 return true 4374 } 4375 // match: (SGTconst [c] (MOVWconst [d])) 4376 // cond: c <= d 4377 // result: (MOVWconst [0]) 4378 for { 4379 c := auxIntToInt32(v.AuxInt) 4380 if v_0.Op != OpMIPSMOVWconst { 4381 break 4382 } 4383 d := auxIntToInt32(v_0.AuxInt) 4384 if !(c <= d) { 4385 break 4386 } 4387 v.reset(OpMIPSMOVWconst) 4388 v.AuxInt = int32ToAuxInt(0) 4389 return true 4390 } 4391 // match: (SGTconst [c] (MOVBreg _)) 4392 // cond: 0x7f < c 4393 // result: (MOVWconst [1]) 4394 for { 4395 c := auxIntToInt32(v.AuxInt) 4396 if v_0.Op != OpMIPSMOVBreg || !(0x7f < c) { 4397 break 4398 } 4399 v.reset(OpMIPSMOVWconst) 4400 v.AuxInt = int32ToAuxInt(1) 4401 return true 4402 } 4403 // match: (SGTconst [c] (MOVBreg _)) 4404 // cond: c <= -0x80 4405 // result: (MOVWconst [0]) 4406 for { 4407 c := auxIntToInt32(v.AuxInt) 4408 if v_0.Op != OpMIPSMOVBreg || !(c <= -0x80) { 4409 break 4410 } 4411 v.reset(OpMIPSMOVWconst) 4412 v.AuxInt = int32ToAuxInt(0) 4413 return true 4414 } 4415 // match: (SGTconst [c] (MOVBUreg _)) 4416 // cond: 0xff < c 4417 // result: (MOVWconst [1]) 4418 for { 4419 c := auxIntToInt32(v.AuxInt) 4420 if v_0.Op != OpMIPSMOVBUreg || !(0xff < c) { 4421 break 4422 } 4423 v.reset(OpMIPSMOVWconst) 4424 v.AuxInt = int32ToAuxInt(1) 4425 return true 4426 } 4427 // match: (SGTconst [c] (MOVBUreg _)) 4428 // cond: c < 0 4429 // result: (MOVWconst [0]) 4430 for { 4431 c := auxIntToInt32(v.AuxInt) 4432 if v_0.Op != OpMIPSMOVBUreg || !(c < 0) { 4433 break 4434 } 4435 v.reset(OpMIPSMOVWconst) 4436 v.AuxInt = int32ToAuxInt(0) 4437 return true 4438 } 4439 // match: (SGTconst [c] (MOVHreg _)) 4440 // cond: 0x7fff < c 4441 // result: (MOVWconst [1]) 4442 for { 4443 c := auxIntToInt32(v.AuxInt) 4444 if v_0.Op != OpMIPSMOVHreg || !(0x7fff < c) { 4445 break 4446 } 4447 v.reset(OpMIPSMOVWconst) 4448 v.AuxInt = int32ToAuxInt(1) 4449 return true 4450 } 4451 // match: (SGTconst [c] (MOVHreg _)) 4452 // cond: c <= -0x8000 4453 // result: (MOVWconst [0]) 4454 for { 4455 c := auxIntToInt32(v.AuxInt) 4456 if v_0.Op != OpMIPSMOVHreg || !(c <= -0x8000) { 4457 break 4458 } 4459 v.reset(OpMIPSMOVWconst) 4460 v.AuxInt = int32ToAuxInt(0) 4461 return true 4462 } 4463 // match: (SGTconst [c] (MOVHUreg _)) 4464 // cond: 0xffff < c 4465 // result: (MOVWconst [1]) 4466 for { 4467 c := auxIntToInt32(v.AuxInt) 4468 if v_0.Op != OpMIPSMOVHUreg || !(0xffff < c) { 4469 break 4470 } 4471 v.reset(OpMIPSMOVWconst) 4472 v.AuxInt = 
int32ToAuxInt(1) 4473 return true 4474 } 4475 // match: (SGTconst [c] (MOVHUreg _)) 4476 // cond: c < 0 4477 // result: (MOVWconst [0]) 4478 for { 4479 c := auxIntToInt32(v.AuxInt) 4480 if v_0.Op != OpMIPSMOVHUreg || !(c < 0) { 4481 break 4482 } 4483 v.reset(OpMIPSMOVWconst) 4484 v.AuxInt = int32ToAuxInt(0) 4485 return true 4486 } 4487 // match: (SGTconst [c] (ANDconst [m] _)) 4488 // cond: 0 <= m && m < c 4489 // result: (MOVWconst [1]) 4490 for { 4491 c := auxIntToInt32(v.AuxInt) 4492 if v_0.Op != OpMIPSANDconst { 4493 break 4494 } 4495 m := auxIntToInt32(v_0.AuxInt) 4496 if !(0 <= m && m < c) { 4497 break 4498 } 4499 v.reset(OpMIPSMOVWconst) 4500 v.AuxInt = int32ToAuxInt(1) 4501 return true 4502 } 4503 // match: (SGTconst [c] (SRLconst _ [d])) 4504 // cond: 0 <= c && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c) 4505 // result: (MOVWconst [1]) 4506 for { 4507 c := auxIntToInt32(v.AuxInt) 4508 if v_0.Op != OpMIPSSRLconst { 4509 break 4510 } 4511 d := auxIntToInt32(v_0.AuxInt) 4512 if !(0 <= c && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) { 4513 break 4514 } 4515 v.reset(OpMIPSMOVWconst) 4516 v.AuxInt = int32ToAuxInt(1) 4517 return true 4518 } 4519 return false 4520 } 4521 func rewriteValueMIPS_OpMIPSSGTzero(v *Value) bool { 4522 v_0 := v.Args[0] 4523 // match: (SGTzero (MOVWconst [d])) 4524 // cond: d > 0 4525 // result: (MOVWconst [1]) 4526 for { 4527 if v_0.Op != OpMIPSMOVWconst { 4528 break 4529 } 4530 d := auxIntToInt32(v_0.AuxInt) 4531 if !(d > 0) { 4532 break 4533 } 4534 v.reset(OpMIPSMOVWconst) 4535 v.AuxInt = int32ToAuxInt(1) 4536 return true 4537 } 4538 // match: (SGTzero (MOVWconst [d])) 4539 // cond: d <= 0 4540 // result: (MOVWconst [0]) 4541 for { 4542 if v_0.Op != OpMIPSMOVWconst { 4543 break 4544 } 4545 d := auxIntToInt32(v_0.AuxInt) 4546 if !(d <= 0) { 4547 break 4548 } 4549 v.reset(OpMIPSMOVWconst) 4550 v.AuxInt = int32ToAuxInt(0) 4551 return true 4552 } 4553 return false 4554 } 4555 func rewriteValueMIPS_OpMIPSSLL(v *Value) bool { 4556 v_1 := v.Args[1] 4557 v_0 := v.Args[0] 4558 // match: (SLL x (MOVWconst [c])) 4559 // result: (SLLconst x [c&31]) 4560 for { 4561 x := v_0 4562 if v_1.Op != OpMIPSMOVWconst { 4563 break 4564 } 4565 c := auxIntToInt32(v_1.AuxInt) 4566 v.reset(OpMIPSSLLconst) 4567 v.AuxInt = int32ToAuxInt(c & 31) 4568 v.AddArg(x) 4569 return true 4570 } 4571 return false 4572 } 4573 func rewriteValueMIPS_OpMIPSSLLconst(v *Value) bool { 4574 v_0 := v.Args[0] 4575 // match: (SLLconst [c] (MOVWconst [d])) 4576 // result: (MOVWconst [d<<uint32(c)]) 4577 for { 4578 c := auxIntToInt32(v.AuxInt) 4579 if v_0.Op != OpMIPSMOVWconst { 4580 break 4581 } 4582 d := auxIntToInt32(v_0.AuxInt) 4583 v.reset(OpMIPSMOVWconst) 4584 v.AuxInt = int32ToAuxInt(d << uint32(c)) 4585 return true 4586 } 4587 return false 4588 } 4589 func rewriteValueMIPS_OpMIPSSRA(v *Value) bool { 4590 v_1 := v.Args[1] 4591 v_0 := v.Args[0] 4592 // match: (SRA x (MOVWconst [c])) 4593 // result: (SRAconst x [c&31]) 4594 for { 4595 x := v_0 4596 if v_1.Op != OpMIPSMOVWconst { 4597 break 4598 } 4599 c := auxIntToInt32(v_1.AuxInt) 4600 v.reset(OpMIPSSRAconst) 4601 v.AuxInt = int32ToAuxInt(c & 31) 4602 v.AddArg(x) 4603 return true 4604 } 4605 return false 4606 } 4607 func rewriteValueMIPS_OpMIPSSRAconst(v *Value) bool { 4608 v_0 := v.Args[0] 4609 // match: (SRAconst [c] (MOVWconst [d])) 4610 // result: (MOVWconst [d>>uint32(c)]) 4611 for { 4612 c := auxIntToInt32(v.AuxInt) 4613 if v_0.Op != OpMIPSMOVWconst { 4614 break 4615 } 4616 d := auxIntToInt32(v_0.AuxInt) 4617 
v.reset(OpMIPSMOVWconst) 4618 v.AuxInt = int32ToAuxInt(d >> uint32(c)) 4619 return true 4620 } 4621 return false 4622 } 4623 func rewriteValueMIPS_OpMIPSSRL(v *Value) bool { 4624 v_1 := v.Args[1] 4625 v_0 := v.Args[0] 4626 // match: (SRL x (MOVWconst [c])) 4627 // result: (SRLconst x [c&31]) 4628 for { 4629 x := v_0 4630 if v_1.Op != OpMIPSMOVWconst { 4631 break 4632 } 4633 c := auxIntToInt32(v_1.AuxInt) 4634 v.reset(OpMIPSSRLconst) 4635 v.AuxInt = int32ToAuxInt(c & 31) 4636 v.AddArg(x) 4637 return true 4638 } 4639 return false 4640 } 4641 func rewriteValueMIPS_OpMIPSSRLconst(v *Value) bool { 4642 v_0 := v.Args[0] 4643 // match: (SRLconst [c] (MOVWconst [d])) 4644 // result: (MOVWconst [int32(uint32(d)>>uint32(c))]) 4645 for { 4646 c := auxIntToInt32(v.AuxInt) 4647 if v_0.Op != OpMIPSMOVWconst { 4648 break 4649 } 4650 d := auxIntToInt32(v_0.AuxInt) 4651 v.reset(OpMIPSMOVWconst) 4652 v.AuxInt = int32ToAuxInt(int32(uint32(d) >> uint32(c))) 4653 return true 4654 } 4655 return false 4656 } 4657 func rewriteValueMIPS_OpMIPSSUB(v *Value) bool { 4658 v_1 := v.Args[1] 4659 v_0 := v.Args[0] 4660 // match: (SUB x (MOVWconst [c])) 4661 // result: (SUBconst [c] x) 4662 for { 4663 x := v_0 4664 if v_1.Op != OpMIPSMOVWconst { 4665 break 4666 } 4667 c := auxIntToInt32(v_1.AuxInt) 4668 v.reset(OpMIPSSUBconst) 4669 v.AuxInt = int32ToAuxInt(c) 4670 v.AddArg(x) 4671 return true 4672 } 4673 // match: (SUB x x) 4674 // result: (MOVWconst [0]) 4675 for { 4676 x := v_0 4677 if x != v_1 { 4678 break 4679 } 4680 v.reset(OpMIPSMOVWconst) 4681 v.AuxInt = int32ToAuxInt(0) 4682 return true 4683 } 4684 // match: (SUB (MOVWconst [0]) x) 4685 // result: (NEG x) 4686 for { 4687 if v_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0.AuxInt) != 0 { 4688 break 4689 } 4690 x := v_1 4691 v.reset(OpMIPSNEG) 4692 v.AddArg(x) 4693 return true 4694 } 4695 return false 4696 } 4697 func rewriteValueMIPS_OpMIPSSUBconst(v *Value) bool { 4698 v_0 := v.Args[0] 4699 // match: (SUBconst [0] x) 4700 // result: x 4701 for { 4702 if auxIntToInt32(v.AuxInt) != 0 { 4703 break 4704 } 4705 x := v_0 4706 v.copyOf(x) 4707 return true 4708 } 4709 // match: (SUBconst [c] (MOVWconst [d])) 4710 // result: (MOVWconst [d-c]) 4711 for { 4712 c := auxIntToInt32(v.AuxInt) 4713 if v_0.Op != OpMIPSMOVWconst { 4714 break 4715 } 4716 d := auxIntToInt32(v_0.AuxInt) 4717 v.reset(OpMIPSMOVWconst) 4718 v.AuxInt = int32ToAuxInt(d - c) 4719 return true 4720 } 4721 // match: (SUBconst [c] (SUBconst [d] x)) 4722 // result: (ADDconst [-c-d] x) 4723 for { 4724 c := auxIntToInt32(v.AuxInt) 4725 if v_0.Op != OpMIPSSUBconst { 4726 break 4727 } 4728 d := auxIntToInt32(v_0.AuxInt) 4729 x := v_0.Args[0] 4730 v.reset(OpMIPSADDconst) 4731 v.AuxInt = int32ToAuxInt(-c - d) 4732 v.AddArg(x) 4733 return true 4734 } 4735 // match: (SUBconst [c] (ADDconst [d] x)) 4736 // result: (ADDconst [-c+d] x) 4737 for { 4738 c := auxIntToInt32(v.AuxInt) 4739 if v_0.Op != OpMIPSADDconst { 4740 break 4741 } 4742 d := auxIntToInt32(v_0.AuxInt) 4743 x := v_0.Args[0] 4744 v.reset(OpMIPSADDconst) 4745 v.AuxInt = int32ToAuxInt(-c + d) 4746 v.AddArg(x) 4747 return true 4748 } 4749 return false 4750 } 4751 func rewriteValueMIPS_OpMIPSXOR(v *Value) bool { 4752 v_1 := v.Args[1] 4753 v_0 := v.Args[0] 4754 // match: (XOR x (MOVWconst [c])) 4755 // result: (XORconst [c] x) 4756 for { 4757 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4758 x := v_0 4759 if v_1.Op != OpMIPSMOVWconst { 4760 continue 4761 } 4762 c := auxIntToInt32(v_1.AuxInt) 4763 v.reset(OpMIPSXORconst) 4764 v.AuxInt = int32ToAuxInt(c) 
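// The enclosing _i0 loop retries the match with v_0 and v_1 swapped, so the
// MOVWconst operand of the commutative XOR is found in either position.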
4765 v.AddArg(x) 4766 return true 4767 } 4768 break 4769 } 4770 // match: (XOR x x) 4771 // result: (MOVWconst [0]) 4772 for { 4773 x := v_0 4774 if x != v_1 { 4775 break 4776 } 4777 v.reset(OpMIPSMOVWconst) 4778 v.AuxInt = int32ToAuxInt(0) 4779 return true 4780 } 4781 return false 4782 } 4783 func rewriteValueMIPS_OpMIPSXORconst(v *Value) bool { 4784 v_0 := v.Args[0] 4785 // match: (XORconst [0] x) 4786 // result: x 4787 for { 4788 if auxIntToInt32(v.AuxInt) != 0 { 4789 break 4790 } 4791 x := v_0 4792 v.copyOf(x) 4793 return true 4794 } 4795 // match: (XORconst [-1] x) 4796 // result: (NORconst [0] x) 4797 for { 4798 if auxIntToInt32(v.AuxInt) != -1 { 4799 break 4800 } 4801 x := v_0 4802 v.reset(OpMIPSNORconst) 4803 v.AuxInt = int32ToAuxInt(0) 4804 v.AddArg(x) 4805 return true 4806 } 4807 // match: (XORconst [c] (MOVWconst [d])) 4808 // result: (MOVWconst [c^d]) 4809 for { 4810 c := auxIntToInt32(v.AuxInt) 4811 if v_0.Op != OpMIPSMOVWconst { 4812 break 4813 } 4814 d := auxIntToInt32(v_0.AuxInt) 4815 v.reset(OpMIPSMOVWconst) 4816 v.AuxInt = int32ToAuxInt(c ^ d) 4817 return true 4818 } 4819 // match: (XORconst [c] (XORconst [d] x)) 4820 // result: (XORconst [c^d] x) 4821 for { 4822 c := auxIntToInt32(v.AuxInt) 4823 if v_0.Op != OpMIPSXORconst { 4824 break 4825 } 4826 d := auxIntToInt32(v_0.AuxInt) 4827 x := v_0.Args[0] 4828 v.reset(OpMIPSXORconst) 4829 v.AuxInt = int32ToAuxInt(c ^ d) 4830 v.AddArg(x) 4831 return true 4832 } 4833 return false 4834 } 4835 func rewriteValueMIPS_OpMod16(v *Value) bool { 4836 v_1 := v.Args[1] 4837 v_0 := v.Args[0] 4838 b := v.Block 4839 typ := &b.Func.Config.Types 4840 // match: (Mod16 x y) 4841 // result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y))) 4842 for { 4843 x := v_0 4844 y := v_1 4845 v.reset(OpSelect0) 4846 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 4847 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 4848 v1.AddArg(x) 4849 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 4850 v2.AddArg(y) 4851 v0.AddArg2(v1, v2) 4852 v.AddArg(v0) 4853 return true 4854 } 4855 } 4856 func rewriteValueMIPS_OpMod16u(v *Value) bool { 4857 v_1 := v.Args[1] 4858 v_0 := v.Args[0] 4859 b := v.Block 4860 typ := &b.Func.Config.Types 4861 // match: (Mod16u x y) 4862 // result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y))) 4863 for { 4864 x := v_0 4865 y := v_1 4866 v.reset(OpSelect0) 4867 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 4868 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 4869 v1.AddArg(x) 4870 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 4871 v2.AddArg(y) 4872 v0.AddArg2(v1, v2) 4873 v.AddArg(v0) 4874 return true 4875 } 4876 } 4877 func rewriteValueMIPS_OpMod32(v *Value) bool { 4878 v_1 := v.Args[1] 4879 v_0 := v.Args[0] 4880 b := v.Block 4881 typ := &b.Func.Config.Types 4882 // match: (Mod32 x y) 4883 // result: (Select0 (DIV x y)) 4884 for { 4885 x := v_0 4886 y := v_1 4887 v.reset(OpSelect0) 4888 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 4889 v0.AddArg2(x, y) 4890 v.AddArg(v0) 4891 return true 4892 } 4893 } 4894 func rewriteValueMIPS_OpMod32u(v *Value) bool { 4895 v_1 := v.Args[1] 4896 v_0 := v.Args[0] 4897 b := v.Block 4898 typ := &b.Func.Config.Types 4899 // match: (Mod32u x y) 4900 // result: (Select0 (DIVU x y)) 4901 for { 4902 x := v_0 4903 y := v_1 4904 v.reset(OpSelect0) 4905 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 4906 v0.AddArg2(x, y) 4907 v.AddArg(v0) 4908 return true 4909 } 4910 } 4911 
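// The Mod lowerings around here rely on DIV/DIVU producing a
// (remainder, quotient) tuple (MIPS HI/LO): Select0 extracts the remainder,
// and sub-word operands are sign- or zero-extended to 32 bits first. The
// constant-folding rules in rewriteValueMIPS_OpSelect0 and
// rewriteValueMIPS_OpSelect1 below follow the same convention
// (Select0 -> c%d, Select1 -> c/d).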
func rewriteValueMIPS_OpMod8(v *Value) bool { 4912 v_1 := v.Args[1] 4913 v_0 := v.Args[0] 4914 b := v.Block 4915 typ := &b.Func.Config.Types 4916 // match: (Mod8 x y) 4917 // result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y))) 4918 for { 4919 x := v_0 4920 y := v_1 4921 v.reset(OpSelect0) 4922 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 4923 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 4924 v1.AddArg(x) 4925 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 4926 v2.AddArg(y) 4927 v0.AddArg2(v1, v2) 4928 v.AddArg(v0) 4929 return true 4930 } 4931 } 4932 func rewriteValueMIPS_OpMod8u(v *Value) bool { 4933 v_1 := v.Args[1] 4934 v_0 := v.Args[0] 4935 b := v.Block 4936 typ := &b.Func.Config.Types 4937 // match: (Mod8u x y) 4938 // result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y))) 4939 for { 4940 x := v_0 4941 y := v_1 4942 v.reset(OpSelect0) 4943 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 4944 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 4945 v1.AddArg(x) 4946 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 4947 v2.AddArg(y) 4948 v0.AddArg2(v1, v2) 4949 v.AddArg(v0) 4950 return true 4951 } 4952 } 4953 func rewriteValueMIPS_OpMove(v *Value) bool { 4954 v_2 := v.Args[2] 4955 v_1 := v.Args[1] 4956 v_0 := v.Args[0] 4957 b := v.Block 4958 config := b.Func.Config 4959 typ := &b.Func.Config.Types 4960 // match: (Move [0] _ _ mem) 4961 // result: mem 4962 for { 4963 if auxIntToInt64(v.AuxInt) != 0 { 4964 break 4965 } 4966 mem := v_2 4967 v.copyOf(mem) 4968 return true 4969 } 4970 // match: (Move [1] dst src mem) 4971 // result: (MOVBstore dst (MOVBUload src mem) mem) 4972 for { 4973 if auxIntToInt64(v.AuxInt) != 1 { 4974 break 4975 } 4976 dst := v_0 4977 src := v_1 4978 mem := v_2 4979 v.reset(OpMIPSMOVBstore) 4980 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 4981 v0.AddArg2(src, mem) 4982 v.AddArg3(dst, v0, mem) 4983 return true 4984 } 4985 // match: (Move [2] {t} dst src mem) 4986 // cond: t.Alignment()%2 == 0 4987 // result: (MOVHstore dst (MOVHUload src mem) mem) 4988 for { 4989 if auxIntToInt64(v.AuxInt) != 2 { 4990 break 4991 } 4992 t := auxToType(v.Aux) 4993 dst := v_0 4994 src := v_1 4995 mem := v_2 4996 if !(t.Alignment()%2 == 0) { 4997 break 4998 } 4999 v.reset(OpMIPSMOVHstore) 5000 v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 5001 v0.AddArg2(src, mem) 5002 v.AddArg3(dst, v0, mem) 5003 return true 5004 } 5005 // match: (Move [2] dst src mem) 5006 // result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)) 5007 for { 5008 if auxIntToInt64(v.AuxInt) != 2 { 5009 break 5010 } 5011 dst := v_0 5012 src := v_1 5013 mem := v_2 5014 v.reset(OpMIPSMOVBstore) 5015 v.AuxInt = int32ToAuxInt(1) 5016 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5017 v0.AuxInt = int32ToAuxInt(1) 5018 v0.AddArg2(src, mem) 5019 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5020 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5021 v2.AddArg2(src, mem) 5022 v1.AddArg3(dst, v2, mem) 5023 v.AddArg3(dst, v0, v1) 5024 return true 5025 } 5026 // match: (Move [4] {t} dst src mem) 5027 // cond: t.Alignment()%4 == 0 5028 // result: (MOVWstore dst (MOVWload src mem) mem) 5029 for { 5030 if auxIntToInt64(v.AuxInt) != 4 { 5031 break 5032 } 5033 t := auxToType(v.Aux) 5034 dst := v_0 5035 src := v_1 5036 mem := v_2 5037 if !(t.Alignment()%4 == 0) { 5038 break 5039 } 5040 v.reset(OpMIPSMOVWstore) 5041 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5042 
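// A 4-byte move with 4-byte alignment is a single word load/store; the rules
// that follow fall back to halfword and then byte copies when the type's
// alignment rules that out.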
v0.AddArg2(src, mem) 5043 v.AddArg3(dst, v0, mem) 5044 return true 5045 } 5046 // match: (Move [4] {t} dst src mem) 5047 // cond: t.Alignment()%2 == 0 5048 // result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 5049 for { 5050 if auxIntToInt64(v.AuxInt) != 4 { 5051 break 5052 } 5053 t := auxToType(v.Aux) 5054 dst := v_0 5055 src := v_1 5056 mem := v_2 5057 if !(t.Alignment()%2 == 0) { 5058 break 5059 } 5060 v.reset(OpMIPSMOVHstore) 5061 v.AuxInt = int32ToAuxInt(2) 5062 v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 5063 v0.AuxInt = int32ToAuxInt(2) 5064 v0.AddArg2(src, mem) 5065 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5066 v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 5067 v2.AddArg2(src, mem) 5068 v1.AddArg3(dst, v2, mem) 5069 v.AddArg3(dst, v0, v1) 5070 return true 5071 } 5072 // match: (Move [4] dst src mem) 5073 // result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))) 5074 for { 5075 if auxIntToInt64(v.AuxInt) != 4 { 5076 break 5077 } 5078 dst := v_0 5079 src := v_1 5080 mem := v_2 5081 v.reset(OpMIPSMOVBstore) 5082 v.AuxInt = int32ToAuxInt(3) 5083 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5084 v0.AuxInt = int32ToAuxInt(3) 5085 v0.AddArg2(src, mem) 5086 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5087 v1.AuxInt = int32ToAuxInt(2) 5088 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5089 v2.AuxInt = int32ToAuxInt(2) 5090 v2.AddArg2(src, mem) 5091 v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5092 v3.AuxInt = int32ToAuxInt(1) 5093 v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5094 v4.AuxInt = int32ToAuxInt(1) 5095 v4.AddArg2(src, mem) 5096 v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5097 v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5098 v6.AddArg2(src, mem) 5099 v5.AddArg3(dst, v6, mem) 5100 v3.AddArg3(dst, v4, v5) 5101 v1.AddArg3(dst, v2, v3) 5102 v.AddArg3(dst, v0, v1) 5103 return true 5104 } 5105 // match: (Move [3] dst src mem) 5106 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))) 5107 for { 5108 if auxIntToInt64(v.AuxInt) != 3 { 5109 break 5110 } 5111 dst := v_0 5112 src := v_1 5113 mem := v_2 5114 v.reset(OpMIPSMOVBstore) 5115 v.AuxInt = int32ToAuxInt(2) 5116 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5117 v0.AuxInt = int32ToAuxInt(2) 5118 v0.AddArg2(src, mem) 5119 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5120 v1.AuxInt = int32ToAuxInt(1) 5121 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5122 v2.AuxInt = int32ToAuxInt(1) 5123 v2.AddArg2(src, mem) 5124 v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 5125 v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 5126 v4.AddArg2(src, mem) 5127 v3.AddArg3(dst, v4, mem) 5128 v1.AddArg3(dst, v2, v3) 5129 v.AddArg3(dst, v0, v1) 5130 return true 5131 } 5132 // match: (Move [8] {t} dst src mem) 5133 // cond: t.Alignment()%4 == 0 5134 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 5135 for { 5136 if auxIntToInt64(v.AuxInt) != 8 { 5137 break 5138 } 5139 t := auxToType(v.Aux) 5140 dst := v_0 5141 src := v_1 5142 mem := v_2 5143 if !(t.Alignment()%4 == 0) { 5144 break 5145 } 5146 v.reset(OpMIPSMOVWstore) 5147 v.AuxInt = int32ToAuxInt(4) 5148 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5149 v0.AuxInt = 
int32ToAuxInt(4) 5150 v0.AddArg2(src, mem) 5151 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 5152 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5153 v2.AddArg2(src, mem) 5154 v1.AddArg3(dst, v2, mem) 5155 v.AddArg3(dst, v0, v1) 5156 return true 5157 } 5158 // match: (Move [8] {t} dst src mem) 5159 // cond: t.Alignment()%2 == 0 5160 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 5161 for { 5162 if auxIntToInt64(v.AuxInt) != 8 { 5163 break 5164 } 5165 t := auxToType(v.Aux) 5166 dst := v_0 5167 src := v_1 5168 mem := v_2 5169 if !(t.Alignment()%2 == 0) { 5170 break 5171 } 5172 v.reset(OpMIPSMOVHstore) 5173 v.AuxInt = int32ToAuxInt(6) 5174 v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5175 v0.AuxInt = int32ToAuxInt(6) 5176 v0.AddArg2(src, mem) 5177 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5178 v1.AuxInt = int32ToAuxInt(4) 5179 v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5180 v2.AuxInt = int32ToAuxInt(4) 5181 v2.AddArg2(src, mem) 5182 v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5183 v3.AuxInt = int32ToAuxInt(2) 5184 v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5185 v4.AuxInt = int32ToAuxInt(2) 5186 v4.AddArg2(src, mem) 5187 v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5188 v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5189 v6.AddArg2(src, mem) 5190 v5.AddArg3(dst, v6, mem) 5191 v3.AddArg3(dst, v4, v5) 5192 v1.AddArg3(dst, v2, v3) 5193 v.AddArg3(dst, v0, v1) 5194 return true 5195 } 5196 // match: (Move [6] {t} dst src mem) 5197 // cond: t.Alignment()%2 == 0 5198 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 5199 for { 5200 if auxIntToInt64(v.AuxInt) != 6 { 5201 break 5202 } 5203 t := auxToType(v.Aux) 5204 dst := v_0 5205 src := v_1 5206 mem := v_2 5207 if !(t.Alignment()%2 == 0) { 5208 break 5209 } 5210 v.reset(OpMIPSMOVHstore) 5211 v.AuxInt = int32ToAuxInt(4) 5212 v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5213 v0.AuxInt = int32ToAuxInt(4) 5214 v0.AddArg2(src, mem) 5215 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5216 v1.AuxInt = int32ToAuxInt(2) 5217 v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5218 v2.AuxInt = int32ToAuxInt(2) 5219 v2.AddArg2(src, mem) 5220 v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 5221 v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 5222 v4.AddArg2(src, mem) 5223 v3.AddArg3(dst, v4, mem) 5224 v1.AddArg3(dst, v2, v3) 5225 v.AddArg3(dst, v0, v1) 5226 return true 5227 } 5228 // match: (Move [12] {t} dst src mem) 5229 // cond: t.Alignment()%4 == 0 5230 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) 5231 for { 5232 if auxIntToInt64(v.AuxInt) != 12 { 5233 break 5234 } 5235 t := auxToType(v.Aux) 5236 dst := v_0 5237 src := v_1 5238 mem := v_2 5239 if !(t.Alignment()%4 == 0) { 5240 break 5241 } 5242 v.reset(OpMIPSMOVWstore) 5243 v.AuxInt = int32ToAuxInt(8) 5244 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5245 v0.AuxInt = int32ToAuxInt(8) 5246 v0.AddArg2(src, mem) 5247 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 5248 v1.AuxInt = int32ToAuxInt(4) 5249 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5250 v2.AuxInt = int32ToAuxInt(4) 5251 v2.AddArg2(src, mem) 5252 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 
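// The unrolled stores are chained through their memory argument: each inner
// MOVWstore is the mem operand of the next one out, so the copy proceeds from
// the lowest offset upward.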
5253 v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5254 v4.AddArg2(src, mem) 5255 v3.AddArg3(dst, v4, mem) 5256 v1.AddArg3(dst, v2, v3) 5257 v.AddArg3(dst, v0, v1) 5258 return true 5259 } 5260 // match: (Move [16] {t} dst src mem) 5261 // cond: t.Alignment()%4 == 0 5262 // result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))) 5263 for { 5264 if auxIntToInt64(v.AuxInt) != 16 { 5265 break 5266 } 5267 t := auxToType(v.Aux) 5268 dst := v_0 5269 src := v_1 5270 mem := v_2 5271 if !(t.Alignment()%4 == 0) { 5272 break 5273 } 5274 v.reset(OpMIPSMOVWstore) 5275 v.AuxInt = int32ToAuxInt(12) 5276 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5277 v0.AuxInt = int32ToAuxInt(12) 5278 v0.AddArg2(src, mem) 5279 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 5280 v1.AuxInt = int32ToAuxInt(8) 5281 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5282 v2.AuxInt = int32ToAuxInt(8) 5283 v2.AddArg2(src, mem) 5284 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 5285 v3.AuxInt = int32ToAuxInt(4) 5286 v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5287 v4.AuxInt = int32ToAuxInt(4) 5288 v4.AddArg2(src, mem) 5289 v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 5290 v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 5291 v6.AddArg2(src, mem) 5292 v5.AddArg3(dst, v6, mem) 5293 v3.AddArg3(dst, v4, v5) 5294 v1.AddArg3(dst, v2, v3) 5295 v.AddArg3(dst, v0, v1) 5296 return true 5297 } 5298 // match: (Move [s] {t} dst src mem) 5299 // cond: (s > 16 && logLargeCopy(v, s) || t.Alignment()%4 != 0) 5300 // result: (LoweredMove [int32(t.Alignment())] dst src (ADDconst <src.Type> src [int32(s-moveSize(t.Alignment(), config))]) mem) 5301 for { 5302 s := auxIntToInt64(v.AuxInt) 5303 t := auxToType(v.Aux) 5304 dst := v_0 5305 src := v_1 5306 mem := v_2 5307 if !(s > 16 && logLargeCopy(v, s) || t.Alignment()%4 != 0) { 5308 break 5309 } 5310 v.reset(OpMIPSLoweredMove) 5311 v.AuxInt = int32ToAuxInt(int32(t.Alignment())) 5312 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type) 5313 v0.AuxInt = int32ToAuxInt(int32(s - moveSize(t.Alignment(), config))) 5314 v0.AddArg(src) 5315 v.AddArg4(dst, src, v0, mem) 5316 return true 5317 } 5318 return false 5319 } 5320 func rewriteValueMIPS_OpNeq16(v *Value) bool { 5321 v_1 := v.Args[1] 5322 v_0 := v.Args[0] 5323 b := v.Block 5324 typ := &b.Func.Config.Types 5325 // match: (Neq16 x y) 5326 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0])) 5327 for { 5328 x := v_0 5329 y := v_1 5330 v.reset(OpMIPSSGTU) 5331 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 5332 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5333 v1.AddArg(x) 5334 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5335 v2.AddArg(y) 5336 v0.AddArg2(v1, v2) 5337 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5338 v3.AuxInt = int32ToAuxInt(0) 5339 v.AddArg2(v0, v3) 5340 return true 5341 } 5342 } 5343 func rewriteValueMIPS_OpNeq32(v *Value) bool { 5344 v_1 := v.Args[1] 5345 v_0 := v.Args[0] 5346 b := v.Block 5347 typ := &b.Func.Config.Types 5348 // match: (Neq32 x y) 5349 // result: (SGTU (XOR x y) (MOVWconst [0])) 5350 for { 5351 x := v_0 5352 y := v_1 5353 v.reset(OpMIPSSGTU) 5354 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 5355 v0.AddArg2(x, y) 5356 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5357 v1.AuxInt = int32ToAuxInt(0) 5358 v.AddArg2(v0, v1) 5359 return true 5360 } 5361 } 5362 func 
rewriteValueMIPS_OpNeq32F(v *Value) bool { 5363 v_1 := v.Args[1] 5364 v_0 := v.Args[0] 5365 b := v.Block 5366 // match: (Neq32F x y) 5367 // result: (FPFlagFalse (CMPEQF x y)) 5368 for { 5369 x := v_0 5370 y := v_1 5371 v.reset(OpMIPSFPFlagFalse) 5372 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags) 5373 v0.AddArg2(x, y) 5374 v.AddArg(v0) 5375 return true 5376 } 5377 } 5378 func rewriteValueMIPS_OpNeq64F(v *Value) bool { 5379 v_1 := v.Args[1] 5380 v_0 := v.Args[0] 5381 b := v.Block 5382 // match: (Neq64F x y) 5383 // result: (FPFlagFalse (CMPEQD x y)) 5384 for { 5385 x := v_0 5386 y := v_1 5387 v.reset(OpMIPSFPFlagFalse) 5388 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags) 5389 v0.AddArg2(x, y) 5390 v.AddArg(v0) 5391 return true 5392 } 5393 } 5394 func rewriteValueMIPS_OpNeq8(v *Value) bool { 5395 v_1 := v.Args[1] 5396 v_0 := v.Args[0] 5397 b := v.Block 5398 typ := &b.Func.Config.Types 5399 // match: (Neq8 x y) 5400 // result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0])) 5401 for { 5402 x := v_0 5403 y := v_1 5404 v.reset(OpMIPSSGTU) 5405 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 5406 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 5407 v1.AddArg(x) 5408 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 5409 v2.AddArg(y) 5410 v0.AddArg2(v1, v2) 5411 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5412 v3.AuxInt = int32ToAuxInt(0) 5413 v.AddArg2(v0, v3) 5414 return true 5415 } 5416 } 5417 func rewriteValueMIPS_OpNeqPtr(v *Value) bool { 5418 v_1 := v.Args[1] 5419 v_0 := v.Args[0] 5420 b := v.Block 5421 typ := &b.Func.Config.Types 5422 // match: (NeqPtr x y) 5423 // result: (SGTU (XOR x y) (MOVWconst [0])) 5424 for { 5425 x := v_0 5426 y := v_1 5427 v.reset(OpMIPSSGTU) 5428 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 5429 v0.AddArg2(x, y) 5430 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5431 v1.AuxInt = int32ToAuxInt(0) 5432 v.AddArg2(v0, v1) 5433 return true 5434 } 5435 } 5436 func rewriteValueMIPS_OpNot(v *Value) bool { 5437 v_0 := v.Args[0] 5438 // match: (Not x) 5439 // result: (XORconst [1] x) 5440 for { 5441 x := v_0 5442 v.reset(OpMIPSXORconst) 5443 v.AuxInt = int32ToAuxInt(1) 5444 v.AddArg(x) 5445 return true 5446 } 5447 } 5448 func rewriteValueMIPS_OpOffPtr(v *Value) bool { 5449 v_0 := v.Args[0] 5450 // match: (OffPtr [off] ptr:(SP)) 5451 // result: (MOVWaddr [int32(off)] ptr) 5452 for { 5453 off := auxIntToInt64(v.AuxInt) 5454 ptr := v_0 5455 if ptr.Op != OpSP { 5456 break 5457 } 5458 v.reset(OpMIPSMOVWaddr) 5459 v.AuxInt = int32ToAuxInt(int32(off)) 5460 v.AddArg(ptr) 5461 return true 5462 } 5463 // match: (OffPtr [off] ptr) 5464 // result: (ADDconst [int32(off)] ptr) 5465 for { 5466 off := auxIntToInt64(v.AuxInt) 5467 ptr := v_0 5468 v.reset(OpMIPSADDconst) 5469 v.AuxInt = int32ToAuxInt(int32(off)) 5470 v.AddArg(ptr) 5471 return true 5472 } 5473 } 5474 func rewriteValueMIPS_OpPanicBounds(v *Value) bool { 5475 v_2 := v.Args[2] 5476 v_1 := v.Args[1] 5477 v_0 := v.Args[0] 5478 // match: (PanicBounds [kind] x y mem) 5479 // cond: boundsABI(kind) == 0 5480 // result: (LoweredPanicBoundsA [kind] x y mem) 5481 for { 5482 kind := auxIntToInt64(v.AuxInt) 5483 x := v_0 5484 y := v_1 5485 mem := v_2 5486 if !(boundsABI(kind) == 0) { 5487 break 5488 } 5489 v.reset(OpMIPSLoweredPanicBoundsA) 5490 v.AuxInt = int64ToAuxInt(kind) 5491 v.AddArg3(x, y, mem) 5492 return true 5493 } 5494 // match: (PanicBounds [kind] x y mem) 5495 // cond: boundsABI(kind) == 1 5496 // result: (LoweredPanicBoundsB [kind] x y mem) 5497 for { 5498 kind := 
auxIntToInt64(v.AuxInt) 5499 x := v_0 5500 y := v_1 5501 mem := v_2 5502 if !(boundsABI(kind) == 1) { 5503 break 5504 } 5505 v.reset(OpMIPSLoweredPanicBoundsB) 5506 v.AuxInt = int64ToAuxInt(kind) 5507 v.AddArg3(x, y, mem) 5508 return true 5509 } 5510 // match: (PanicBounds [kind] x y mem) 5511 // cond: boundsABI(kind) == 2 5512 // result: (LoweredPanicBoundsC [kind] x y mem) 5513 for { 5514 kind := auxIntToInt64(v.AuxInt) 5515 x := v_0 5516 y := v_1 5517 mem := v_2 5518 if !(boundsABI(kind) == 2) { 5519 break 5520 } 5521 v.reset(OpMIPSLoweredPanicBoundsC) 5522 v.AuxInt = int64ToAuxInt(kind) 5523 v.AddArg3(x, y, mem) 5524 return true 5525 } 5526 return false 5527 } 5528 func rewriteValueMIPS_OpPanicExtend(v *Value) bool { 5529 v_3 := v.Args[3] 5530 v_2 := v.Args[2] 5531 v_1 := v.Args[1] 5532 v_0 := v.Args[0] 5533 // match: (PanicExtend [kind] hi lo y mem) 5534 // cond: boundsABI(kind) == 0 5535 // result: (LoweredPanicExtendA [kind] hi lo y mem) 5536 for { 5537 kind := auxIntToInt64(v.AuxInt) 5538 hi := v_0 5539 lo := v_1 5540 y := v_2 5541 mem := v_3 5542 if !(boundsABI(kind) == 0) { 5543 break 5544 } 5545 v.reset(OpMIPSLoweredPanicExtendA) 5546 v.AuxInt = int64ToAuxInt(kind) 5547 v.AddArg4(hi, lo, y, mem) 5548 return true 5549 } 5550 // match: (PanicExtend [kind] hi lo y mem) 5551 // cond: boundsABI(kind) == 1 5552 // result: (LoweredPanicExtendB [kind] hi lo y mem) 5553 for { 5554 kind := auxIntToInt64(v.AuxInt) 5555 hi := v_0 5556 lo := v_1 5557 y := v_2 5558 mem := v_3 5559 if !(boundsABI(kind) == 1) { 5560 break 5561 } 5562 v.reset(OpMIPSLoweredPanicExtendB) 5563 v.AuxInt = int64ToAuxInt(kind) 5564 v.AddArg4(hi, lo, y, mem) 5565 return true 5566 } 5567 // match: (PanicExtend [kind] hi lo y mem) 5568 // cond: boundsABI(kind) == 2 5569 // result: (LoweredPanicExtendC [kind] hi lo y mem) 5570 for { 5571 kind := auxIntToInt64(v.AuxInt) 5572 hi := v_0 5573 lo := v_1 5574 y := v_2 5575 mem := v_3 5576 if !(boundsABI(kind) == 2) { 5577 break 5578 } 5579 v.reset(OpMIPSLoweredPanicExtendC) 5580 v.AuxInt = int64ToAuxInt(kind) 5581 v.AddArg4(hi, lo, y, mem) 5582 return true 5583 } 5584 return false 5585 } 5586 func rewriteValueMIPS_OpRotateLeft16(v *Value) bool { 5587 v_1 := v.Args[1] 5588 v_0 := v.Args[0] 5589 b := v.Block 5590 typ := &b.Func.Config.Types 5591 // match: (RotateLeft16 <t> x (MOVWconst [c])) 5592 // result: (Or16 (Lsh16x32 <t> x (MOVWconst [c&15])) (Rsh16Ux32 <t> x (MOVWconst [-c&15]))) 5593 for { 5594 t := v.Type 5595 x := v_0 5596 if v_1.Op != OpMIPSMOVWconst { 5597 break 5598 } 5599 c := auxIntToInt32(v_1.AuxInt) 5600 v.reset(OpOr16) 5601 v0 := b.NewValue0(v.Pos, OpLsh16x32, t) 5602 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5603 v1.AuxInt = int32ToAuxInt(c & 15) 5604 v0.AddArg2(x, v1) 5605 v2 := b.NewValue0(v.Pos, OpRsh16Ux32, t) 5606 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5607 v3.AuxInt = int32ToAuxInt(-c & 15) 5608 v2.AddArg2(x, v3) 5609 v.AddArg2(v0, v2) 5610 return true 5611 } 5612 return false 5613 } 5614 func rewriteValueMIPS_OpRotateLeft32(v *Value) bool { 5615 v_1 := v.Args[1] 5616 v_0 := v.Args[0] 5617 b := v.Block 5618 typ := &b.Func.Config.Types 5619 // match: (RotateLeft32 <t> x (MOVWconst [c])) 5620 // result: (Or32 (Lsh32x32 <t> x (MOVWconst [c&31])) (Rsh32Ux32 <t> x (MOVWconst [-c&31]))) 5621 for { 5622 t := v.Type 5623 x := v_0 5624 if v_1.Op != OpMIPSMOVWconst { 5625 break 5626 } 5627 c := auxIntToInt32(v_1.AuxInt) 5628 v.reset(OpOr32) 5629 v0 := b.NewValue0(v.Pos, OpLsh32x32, t) 5630 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, 
typ.UInt32) 5631 v1.AuxInt = int32ToAuxInt(c & 31) 5632 v0.AddArg2(x, v1) 5633 v2 := b.NewValue0(v.Pos, OpRsh32Ux32, t) 5634 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5635 v3.AuxInt = int32ToAuxInt(-c & 31) 5636 v2.AddArg2(x, v3) 5637 v.AddArg2(v0, v2) 5638 return true 5639 } 5640 return false 5641 } 5642 func rewriteValueMIPS_OpRotateLeft64(v *Value) bool { 5643 v_1 := v.Args[1] 5644 v_0 := v.Args[0] 5645 b := v.Block 5646 typ := &b.Func.Config.Types 5647 // match: (RotateLeft64 <t> x (MOVWconst [c])) 5648 // result: (Or64 (Lsh64x32 <t> x (MOVWconst [c&63])) (Rsh64Ux32 <t> x (MOVWconst [-c&63]))) 5649 for { 5650 t := v.Type 5651 x := v_0 5652 if v_1.Op != OpMIPSMOVWconst { 5653 break 5654 } 5655 c := auxIntToInt32(v_1.AuxInt) 5656 v.reset(OpOr64) 5657 v0 := b.NewValue0(v.Pos, OpLsh64x32, t) 5658 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5659 v1.AuxInt = int32ToAuxInt(c & 63) 5660 v0.AddArg2(x, v1) 5661 v2 := b.NewValue0(v.Pos, OpRsh64Ux32, t) 5662 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5663 v3.AuxInt = int32ToAuxInt(-c & 63) 5664 v2.AddArg2(x, v3) 5665 v.AddArg2(v0, v2) 5666 return true 5667 } 5668 return false 5669 } 5670 func rewriteValueMIPS_OpRotateLeft8(v *Value) bool { 5671 v_1 := v.Args[1] 5672 v_0 := v.Args[0] 5673 b := v.Block 5674 typ := &b.Func.Config.Types 5675 // match: (RotateLeft8 <t> x (MOVWconst [c])) 5676 // result: (Or8 (Lsh8x32 <t> x (MOVWconst [c&7])) (Rsh8Ux32 <t> x (MOVWconst [-c&7]))) 5677 for { 5678 t := v.Type 5679 x := v_0 5680 if v_1.Op != OpMIPSMOVWconst { 5681 break 5682 } 5683 c := auxIntToInt32(v_1.AuxInt) 5684 v.reset(OpOr8) 5685 v0 := b.NewValue0(v.Pos, OpLsh8x32, t) 5686 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5687 v1.AuxInt = int32ToAuxInt(c & 7) 5688 v0.AddArg2(x, v1) 5689 v2 := b.NewValue0(v.Pos, OpRsh8Ux32, t) 5690 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5691 v3.AuxInt = int32ToAuxInt(-c & 7) 5692 v2.AddArg2(x, v3) 5693 v.AddArg2(v0, v2) 5694 return true 5695 } 5696 return false 5697 } 5698 func rewriteValueMIPS_OpRsh16Ux16(v *Value) bool { 5699 v_1 := v.Args[1] 5700 v_0 := v.Args[0] 5701 b := v.Block 5702 typ := &b.Func.Config.Types 5703 // match: (Rsh16Ux16 <t> x y) 5704 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 5705 for { 5706 t := v.Type 5707 x := v_0 5708 y := v_1 5709 v.reset(OpMIPSCMOVZ) 5710 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 5711 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5712 v1.AddArg(x) 5713 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5714 v2.AddArg(y) 5715 v0.AddArg2(v1, v2) 5716 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5717 v3.AuxInt = int32ToAuxInt(0) 5718 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5719 v4.AuxInt = int32ToAuxInt(32) 5720 v4.AddArg(v2) 5721 v.AddArg3(v0, v3, v4) 5722 return true 5723 } 5724 } 5725 func rewriteValueMIPS_OpRsh16Ux32(v *Value) bool { 5726 v_1 := v.Args[1] 5727 v_0 := v.Args[0] 5728 b := v.Block 5729 typ := &b.Func.Config.Types 5730 // match: (Rsh16Ux32 <t> x y) 5731 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y)) 5732 for { 5733 t := v.Type 5734 x := v_0 5735 y := v_1 5736 v.reset(OpMIPSCMOVZ) 5737 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 5738 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5739 v1.AddArg(x) 5740 v0.AddArg2(v1, y) 5741 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5742 v2.AuxInt = int32ToAuxInt(0) 5743 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 
5744 v3.AuxInt = int32ToAuxInt(32) 5745 v3.AddArg(y) 5746 v.AddArg3(v0, v2, v3) 5747 return true 5748 } 5749 } 5750 func rewriteValueMIPS_OpRsh16Ux64(v *Value) bool { 5751 v_1 := v.Args[1] 5752 v_0 := v.Args[0] 5753 b := v.Block 5754 typ := &b.Func.Config.Types 5755 // match: (Rsh16Ux64 x (Const64 [c])) 5756 // cond: uint32(c) < 16 5757 // result: (SRLconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)]) 5758 for { 5759 x := v_0 5760 if v_1.Op != OpConst64 { 5761 break 5762 } 5763 c := auxIntToInt64(v_1.AuxInt) 5764 if !(uint32(c) < 16) { 5765 break 5766 } 5767 v.reset(OpMIPSSRLconst) 5768 v.AuxInt = int32ToAuxInt(int32(c + 16)) 5769 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 5770 v0.AuxInt = int32ToAuxInt(16) 5771 v0.AddArg(x) 5772 v.AddArg(v0) 5773 return true 5774 } 5775 // match: (Rsh16Ux64 _ (Const64 [c])) 5776 // cond: uint32(c) >= 16 5777 // result: (MOVWconst [0]) 5778 for { 5779 if v_1.Op != OpConst64 { 5780 break 5781 } 5782 c := auxIntToInt64(v_1.AuxInt) 5783 if !(uint32(c) >= 16) { 5784 break 5785 } 5786 v.reset(OpMIPSMOVWconst) 5787 v.AuxInt = int32ToAuxInt(0) 5788 return true 5789 } 5790 return false 5791 } 5792 func rewriteValueMIPS_OpRsh16Ux8(v *Value) bool { 5793 v_1 := v.Args[1] 5794 v_0 := v.Args[0] 5795 b := v.Block 5796 typ := &b.Func.Config.Types 5797 // match: (Rsh16Ux8 <t> x y) 5798 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 5799 for { 5800 t := v.Type 5801 x := v_0 5802 y := v_1 5803 v.reset(OpMIPSCMOVZ) 5804 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 5805 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5806 v1.AddArg(x) 5807 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 5808 v2.AddArg(y) 5809 v0.AddArg2(v1, v2) 5810 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5811 v3.AuxInt = int32ToAuxInt(0) 5812 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5813 v4.AuxInt = int32ToAuxInt(32) 5814 v4.AddArg(v2) 5815 v.AddArg3(v0, v3, v4) 5816 return true 5817 } 5818 } 5819 func rewriteValueMIPS_OpRsh16x16(v *Value) bool { 5820 v_1 := v.Args[1] 5821 v_0 := v.Args[0] 5822 b := v.Block 5823 typ := &b.Func.Config.Types 5824 // match: (Rsh16x16 x y) 5825 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt16to32 y)))) 5826 for { 5827 x := v_0 5828 y := v_1 5829 v.reset(OpMIPSSRA) 5830 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 5831 v0.AddArg(x) 5832 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 5833 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5834 v2.AddArg(y) 5835 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5836 v3.AuxInt = int32ToAuxInt(31) 5837 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5838 v4.AuxInt = int32ToAuxInt(32) 5839 v4.AddArg(v2) 5840 v1.AddArg3(v2, v3, v4) 5841 v.AddArg2(v0, v1) 5842 return true 5843 } 5844 } 5845 func rewriteValueMIPS_OpRsh16x32(v *Value) bool { 5846 v_1 := v.Args[1] 5847 v_0 := v.Args[0] 5848 b := v.Block 5849 typ := &b.Func.Config.Types 5850 // match: (Rsh16x32 x y) 5851 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [31]) (SGTUconst [32] y))) 5852 for { 5853 x := v_0 5854 y := v_1 5855 v.reset(OpMIPSSRA) 5856 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 5857 v0.AddArg(x) 5858 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 5859 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5860 v2.AuxInt = int32ToAuxInt(31) 5861 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5862 v3.AuxInt = int32ToAuxInt(32) 5863 
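// Signed right shifts clamp the amount instead of zeroing the result: when
// SGTUconst [32] y is 0 (y >= 32), the CMOVZ substitutes 31, and SRA then
// fills the result with copies of the sign bit.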
v3.AddArg(y) 5864 v1.AddArg3(y, v2, v3) 5865 v.AddArg2(v0, v1) 5866 return true 5867 } 5868 } 5869 func rewriteValueMIPS_OpRsh16x64(v *Value) bool { 5870 v_1 := v.Args[1] 5871 v_0 := v.Args[0] 5872 b := v.Block 5873 typ := &b.Func.Config.Types 5874 // match: (Rsh16x64 x (Const64 [c])) 5875 // cond: uint32(c) < 16 5876 // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [int32(c+16)]) 5877 for { 5878 x := v_0 5879 if v_1.Op != OpConst64 { 5880 break 5881 } 5882 c := auxIntToInt64(v_1.AuxInt) 5883 if !(uint32(c) < 16) { 5884 break 5885 } 5886 v.reset(OpMIPSSRAconst) 5887 v.AuxInt = int32ToAuxInt(int32(c + 16)) 5888 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 5889 v0.AuxInt = int32ToAuxInt(16) 5890 v0.AddArg(x) 5891 v.AddArg(v0) 5892 return true 5893 } 5894 // match: (Rsh16x64 x (Const64 [c])) 5895 // cond: uint32(c) >= 16 5896 // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31]) 5897 for { 5898 x := v_0 5899 if v_1.Op != OpConst64 { 5900 break 5901 } 5902 c := auxIntToInt64(v_1.AuxInt) 5903 if !(uint32(c) >= 16) { 5904 break 5905 } 5906 v.reset(OpMIPSSRAconst) 5907 v.AuxInt = int32ToAuxInt(31) 5908 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 5909 v0.AuxInt = int32ToAuxInt(16) 5910 v0.AddArg(x) 5911 v.AddArg(v0) 5912 return true 5913 } 5914 return false 5915 } 5916 func rewriteValueMIPS_OpRsh16x8(v *Value) bool { 5917 v_1 := v.Args[1] 5918 v_0 := v.Args[0] 5919 b := v.Block 5920 typ := &b.Func.Config.Types 5921 // match: (Rsh16x8 x y) 5922 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt8to32 y)))) 5923 for { 5924 x := v_0 5925 y := v_1 5926 v.reset(OpMIPSSRA) 5927 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 5928 v0.AddArg(x) 5929 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 5930 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 5931 v2.AddArg(y) 5932 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5933 v3.AuxInt = int32ToAuxInt(31) 5934 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5935 v4.AuxInt = int32ToAuxInt(32) 5936 v4.AddArg(v2) 5937 v1.AddArg3(v2, v3, v4) 5938 v.AddArg2(v0, v1) 5939 return true 5940 } 5941 } 5942 func rewriteValueMIPS_OpRsh32Ux16(v *Value) bool { 5943 v_1 := v.Args[1] 5944 v_0 := v.Args[0] 5945 b := v.Block 5946 typ := &b.Func.Config.Types 5947 // match: (Rsh32Ux16 <t> x y) 5948 // result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 5949 for { 5950 t := v.Type 5951 x := v_0 5952 y := v_1 5953 v.reset(OpMIPSCMOVZ) 5954 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 5955 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5956 v1.AddArg(y) 5957 v0.AddArg2(x, v1) 5958 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5959 v2.AuxInt = int32ToAuxInt(0) 5960 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5961 v3.AuxInt = int32ToAuxInt(32) 5962 v3.AddArg(v1) 5963 v.AddArg3(v0, v2, v3) 5964 return true 5965 } 5966 } 5967 func rewriteValueMIPS_OpRsh32Ux32(v *Value) bool { 5968 v_1 := v.Args[1] 5969 v_0 := v.Args[0] 5970 b := v.Block 5971 typ := &b.Func.Config.Types 5972 // match: (Rsh32Ux32 <t> x y) 5973 // result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 5974 for { 5975 t := v.Type 5976 x := v_0 5977 y := v_1 5978 v.reset(OpMIPSCMOVZ) 5979 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 5980 v0.AddArg2(x, y) 5981 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 5982 v1.AuxInt = int32ToAuxInt(0) 5983 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 5984 v2.AuxInt = int32ToAuxInt(32) 5985 
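// Unsigned right shifts use the same guard the other way: SGTUconst [32] y is
// 1 while y < 32, so the CMOVZ keeps the SRL result and switches to the
// constant 0 once the shift amount reaches 32.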
v2.AddArg(y) 5986 v.AddArg3(v0, v1, v2) 5987 return true 5988 } 5989 } 5990 func rewriteValueMIPS_OpRsh32Ux64(v *Value) bool { 5991 v_1 := v.Args[1] 5992 v_0 := v.Args[0] 5993 // match: (Rsh32Ux64 x (Const64 [c])) 5994 // cond: uint32(c) < 32 5995 // result: (SRLconst x [int32(c)]) 5996 for { 5997 x := v_0 5998 if v_1.Op != OpConst64 { 5999 break 6000 } 6001 c := auxIntToInt64(v_1.AuxInt) 6002 if !(uint32(c) < 32) { 6003 break 6004 } 6005 v.reset(OpMIPSSRLconst) 6006 v.AuxInt = int32ToAuxInt(int32(c)) 6007 v.AddArg(x) 6008 return true 6009 } 6010 // match: (Rsh32Ux64 _ (Const64 [c])) 6011 // cond: uint32(c) >= 32 6012 // result: (MOVWconst [0]) 6013 for { 6014 if v_1.Op != OpConst64 { 6015 break 6016 } 6017 c := auxIntToInt64(v_1.AuxInt) 6018 if !(uint32(c) >= 32) { 6019 break 6020 } 6021 v.reset(OpMIPSMOVWconst) 6022 v.AuxInt = int32ToAuxInt(0) 6023 return true 6024 } 6025 return false 6026 } 6027 func rewriteValueMIPS_OpRsh32Ux8(v *Value) bool { 6028 v_1 := v.Args[1] 6029 v_0 := v.Args[0] 6030 b := v.Block 6031 typ := &b.Func.Config.Types 6032 // match: (Rsh32Ux8 <t> x y) 6033 // result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 6034 for { 6035 t := v.Type 6036 x := v_0 6037 y := v_1 6038 v.reset(OpMIPSCMOVZ) 6039 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 6040 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6041 v1.AddArg(y) 6042 v0.AddArg2(x, v1) 6043 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6044 v2.AuxInt = int32ToAuxInt(0) 6045 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6046 v3.AuxInt = int32ToAuxInt(32) 6047 v3.AddArg(v1) 6048 v.AddArg3(v0, v2, v3) 6049 return true 6050 } 6051 } 6052 func rewriteValueMIPS_OpRsh32x16(v *Value) bool { 6053 v_1 := v.Args[1] 6054 v_0 := v.Args[0] 6055 b := v.Block 6056 typ := &b.Func.Config.Types 6057 // match: (Rsh32x16 x y) 6058 // result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt16to32 y)))) 6059 for { 6060 x := v_0 6061 y := v_1 6062 v.reset(OpMIPSSRA) 6063 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6064 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6065 v1.AddArg(y) 6066 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6067 v2.AuxInt = int32ToAuxInt(31) 6068 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6069 v3.AuxInt = int32ToAuxInt(32) 6070 v3.AddArg(v1) 6071 v0.AddArg3(v1, v2, v3) 6072 v.AddArg2(x, v0) 6073 return true 6074 } 6075 } 6076 func rewriteValueMIPS_OpRsh32x32(v *Value) bool { 6077 v_1 := v.Args[1] 6078 v_0 := v.Args[0] 6079 b := v.Block 6080 typ := &b.Func.Config.Types 6081 // match: (Rsh32x32 x y) 6082 // result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [31]) (SGTUconst [32] y))) 6083 for { 6084 x := v_0 6085 y := v_1 6086 v.reset(OpMIPSSRA) 6087 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6088 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6089 v1.AuxInt = int32ToAuxInt(31) 6090 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6091 v2.AuxInt = int32ToAuxInt(32) 6092 v2.AddArg(y) 6093 v0.AddArg3(y, v1, v2) 6094 v.AddArg2(x, v0) 6095 return true 6096 } 6097 } 6098 func rewriteValueMIPS_OpRsh32x64(v *Value) bool { 6099 v_1 := v.Args[1] 6100 v_0 := v.Args[0] 6101 // match: (Rsh32x64 x (Const64 [c])) 6102 // cond: uint32(c) < 32 6103 // result: (SRAconst x [int32(c)]) 6104 for { 6105 x := v_0 6106 if v_1.Op != OpConst64 { 6107 break 6108 } 6109 c := auxIntToInt64(v_1.AuxInt) 6110 if !(uint32(c) < 32) { 6111 break 6112 } 6113 v.reset(OpMIPSSRAconst) 6114 v.AuxInt = 
int32ToAuxInt(int32(c)) 6115 v.AddArg(x) 6116 return true 6117 } 6118 // match: (Rsh32x64 x (Const64 [c])) 6119 // cond: uint32(c) >= 32 6120 // result: (SRAconst x [31]) 6121 for { 6122 x := v_0 6123 if v_1.Op != OpConst64 { 6124 break 6125 } 6126 c := auxIntToInt64(v_1.AuxInt) 6127 if !(uint32(c) >= 32) { 6128 break 6129 } 6130 v.reset(OpMIPSSRAconst) 6131 v.AuxInt = int32ToAuxInt(31) 6132 v.AddArg(x) 6133 return true 6134 } 6135 return false 6136 } 6137 func rewriteValueMIPS_OpRsh32x8(v *Value) bool { 6138 v_1 := v.Args[1] 6139 v_0 := v.Args[0] 6140 b := v.Block 6141 typ := &b.Func.Config.Types 6142 // match: (Rsh32x8 x y) 6143 // result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt8to32 y)))) 6144 for { 6145 x := v_0 6146 y := v_1 6147 v.reset(OpMIPSSRA) 6148 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6149 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6150 v1.AddArg(y) 6151 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6152 v2.AuxInt = int32ToAuxInt(31) 6153 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6154 v3.AuxInt = int32ToAuxInt(32) 6155 v3.AddArg(v1) 6156 v0.AddArg3(v1, v2, v3) 6157 v.AddArg2(x, v0) 6158 return true 6159 } 6160 } 6161 func rewriteValueMIPS_OpRsh8Ux16(v *Value) bool { 6162 v_1 := v.Args[1] 6163 v_0 := v.Args[0] 6164 b := v.Block 6165 typ := &b.Func.Config.Types 6166 // match: (Rsh8Ux16 <t> x y) 6167 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 6168 for { 6169 t := v.Type 6170 x := v_0 6171 y := v_1 6172 v.reset(OpMIPSCMOVZ) 6173 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 6174 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6175 v1.AddArg(x) 6176 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6177 v2.AddArg(y) 6178 v0.AddArg2(v1, v2) 6179 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6180 v3.AuxInt = int32ToAuxInt(0) 6181 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6182 v4.AuxInt = int32ToAuxInt(32) 6183 v4.AddArg(v2) 6184 v.AddArg3(v0, v3, v4) 6185 return true 6186 } 6187 } 6188 func rewriteValueMIPS_OpRsh8Ux32(v *Value) bool { 6189 v_1 := v.Args[1] 6190 v_0 := v.Args[0] 6191 b := v.Block 6192 typ := &b.Func.Config.Types 6193 // match: (Rsh8Ux32 <t> x y) 6194 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y)) 6195 for { 6196 t := v.Type 6197 x := v_0 6198 y := v_1 6199 v.reset(OpMIPSCMOVZ) 6200 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 6201 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6202 v1.AddArg(x) 6203 v0.AddArg2(v1, y) 6204 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6205 v2.AuxInt = int32ToAuxInt(0) 6206 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6207 v3.AuxInt = int32ToAuxInt(32) 6208 v3.AddArg(y) 6209 v.AddArg3(v0, v2, v3) 6210 return true 6211 } 6212 } 6213 func rewriteValueMIPS_OpRsh8Ux64(v *Value) bool { 6214 v_1 := v.Args[1] 6215 v_0 := v.Args[0] 6216 b := v.Block 6217 typ := &b.Func.Config.Types 6218 // match: (Rsh8Ux64 x (Const64 [c])) 6219 // cond: uint32(c) < 8 6220 // result: (SRLconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)]) 6221 for { 6222 x := v_0 6223 if v_1.Op != OpConst64 { 6224 break 6225 } 6226 c := auxIntToInt64(v_1.AuxInt) 6227 if !(uint32(c) < 8) { 6228 break 6229 } 6230 v.reset(OpMIPSSRLconst) 6231 v.AuxInt = int32ToAuxInt(int32(c + 24)) 6232 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 6233 v0.AuxInt = int32ToAuxInt(24) 6234 v0.AddArg(x) 6235 v.AddArg(v0) 6236 return true 6237 } 6238 // match: (Rsh8Ux64 
_ (Const64 [c])) 6239 // cond: uint32(c) >= 8 6240 // result: (MOVWconst [0]) 6241 for { 6242 if v_1.Op != OpConst64 { 6243 break 6244 } 6245 c := auxIntToInt64(v_1.AuxInt) 6246 if !(uint32(c) >= 8) { 6247 break 6248 } 6249 v.reset(OpMIPSMOVWconst) 6250 v.AuxInt = int32ToAuxInt(0) 6251 return true 6252 } 6253 return false 6254 } 6255 func rewriteValueMIPS_OpRsh8Ux8(v *Value) bool { 6256 v_1 := v.Args[1] 6257 v_0 := v.Args[0] 6258 b := v.Block 6259 typ := &b.Func.Config.Types 6260 // match: (Rsh8Ux8 <t> x y) 6261 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 6262 for { 6263 t := v.Type 6264 x := v_0 6265 y := v_1 6266 v.reset(OpMIPSCMOVZ) 6267 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 6268 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6269 v1.AddArg(x) 6270 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6271 v2.AddArg(y) 6272 v0.AddArg2(v1, v2) 6273 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6274 v3.AuxInt = int32ToAuxInt(0) 6275 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6276 v4.AuxInt = int32ToAuxInt(32) 6277 v4.AddArg(v2) 6278 v.AddArg3(v0, v3, v4) 6279 return true 6280 } 6281 } 6282 func rewriteValueMIPS_OpRsh8x16(v *Value) bool { 6283 v_1 := v.Args[1] 6284 v_0 := v.Args[0] 6285 b := v.Block 6286 typ := &b.Func.Config.Types 6287 // match: (Rsh8x16 x y) 6288 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt16to32 y)))) 6289 for { 6290 x := v_0 6291 y := v_1 6292 v.reset(OpMIPSSRA) 6293 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6294 v0.AddArg(x) 6295 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6296 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6297 v2.AddArg(y) 6298 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6299 v3.AuxInt = int32ToAuxInt(31) 6300 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6301 v4.AuxInt = int32ToAuxInt(32) 6302 v4.AddArg(v2) 6303 v1.AddArg3(v2, v3, v4) 6304 v.AddArg2(v0, v1) 6305 return true 6306 } 6307 } 6308 func rewriteValueMIPS_OpRsh8x32(v *Value) bool { 6309 v_1 := v.Args[1] 6310 v_0 := v.Args[0] 6311 b := v.Block 6312 typ := &b.Func.Config.Types 6313 // match: (Rsh8x32 x y) 6314 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [31]) (SGTUconst [32] y))) 6315 for { 6316 x := v_0 6317 y := v_1 6318 v.reset(OpMIPSSRA) 6319 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6320 v0.AddArg(x) 6321 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6322 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6323 v2.AuxInt = int32ToAuxInt(31) 6324 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6325 v3.AuxInt = int32ToAuxInt(32) 6326 v3.AddArg(y) 6327 v1.AddArg3(y, v2, v3) 6328 v.AddArg2(v0, v1) 6329 return true 6330 } 6331 } 6332 func rewriteValueMIPS_OpRsh8x64(v *Value) bool { 6333 v_1 := v.Args[1] 6334 v_0 := v.Args[0] 6335 b := v.Block 6336 typ := &b.Func.Config.Types 6337 // match: (Rsh8x64 x (Const64 [c])) 6338 // cond: uint32(c) < 8 6339 // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [int32(c+24)]) 6340 for { 6341 x := v_0 6342 if v_1.Op != OpConst64 { 6343 break 6344 } 6345 c := auxIntToInt64(v_1.AuxInt) 6346 if !(uint32(c) < 8) { 6347 break 6348 } 6349 v.reset(OpMIPSSRAconst) 6350 v.AuxInt = int32ToAuxInt(int32(c + 24)) 6351 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 6352 v0.AuxInt = int32ToAuxInt(24) 6353 v0.AddArg(x) 6354 v.AddArg(v0) 6355 return true 6356 } 6357 // match: (Rsh8x64 x (Const64 [c])) 6358 // cond: 
uint32(c) >= 8 6359 // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31]) 6360 for { 6361 x := v_0 6362 if v_1.Op != OpConst64 { 6363 break 6364 } 6365 c := auxIntToInt64(v_1.AuxInt) 6366 if !(uint32(c) >= 8) { 6367 break 6368 } 6369 v.reset(OpMIPSSRAconst) 6370 v.AuxInt = int32ToAuxInt(31) 6371 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 6372 v0.AuxInt = int32ToAuxInt(24) 6373 v0.AddArg(x) 6374 v.AddArg(v0) 6375 return true 6376 } 6377 return false 6378 } 6379 func rewriteValueMIPS_OpRsh8x8(v *Value) bool { 6380 v_1 := v.Args[1] 6381 v_0 := v.Args[0] 6382 b := v.Block 6383 typ := &b.Func.Config.Types 6384 // match: (Rsh8x8 x y) 6385 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [31]) (SGTUconst [32] (ZeroExt8to32 y)))) 6386 for { 6387 x := v_0 6388 y := v_1 6389 v.reset(OpMIPSSRA) 6390 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6391 v0.AddArg(x) 6392 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 6393 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6394 v2.AddArg(y) 6395 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6396 v3.AuxInt = int32ToAuxInt(31) 6397 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 6398 v4.AuxInt = int32ToAuxInt(32) 6399 v4.AddArg(v2) 6400 v1.AddArg3(v2, v3, v4) 6401 v.AddArg2(v0, v1) 6402 return true 6403 } 6404 } 6405 func rewriteValueMIPS_OpSelect0(v *Value) bool { 6406 v_0 := v.Args[0] 6407 b := v.Block 6408 typ := &b.Func.Config.Types 6409 // match: (Select0 (Add32carry <t> x y)) 6410 // result: (ADD <t.FieldType(0)> x y) 6411 for { 6412 if v_0.Op != OpAdd32carry { 6413 break 6414 } 6415 t := v_0.Type 6416 y := v_0.Args[1] 6417 x := v_0.Args[0] 6418 v.reset(OpMIPSADD) 6419 v.Type = t.FieldType(0) 6420 v.AddArg2(x, y) 6421 return true 6422 } 6423 // match: (Select0 (Sub32carry <t> x y)) 6424 // result: (SUB <t.FieldType(0)> x y) 6425 for { 6426 if v_0.Op != OpSub32carry { 6427 break 6428 } 6429 t := v_0.Type 6430 y := v_0.Args[1] 6431 x := v_0.Args[0] 6432 v.reset(OpMIPSSUB) 6433 v.Type = t.FieldType(0) 6434 v.AddArg2(x, y) 6435 return true 6436 } 6437 // match: (Select0 (MULTU (MOVWconst [0]) _ )) 6438 // result: (MOVWconst [0]) 6439 for { 6440 if v_0.Op != OpMIPSMULTU { 6441 break 6442 } 6443 v_0_0 := v_0.Args[0] 6444 v_0_1 := v_0.Args[1] 6445 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6446 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != 0 { 6447 continue 6448 } 6449 v.reset(OpMIPSMOVWconst) 6450 v.AuxInt = int32ToAuxInt(0) 6451 return true 6452 } 6453 break 6454 } 6455 // match: (Select0 (MULTU (MOVWconst [1]) _ )) 6456 // result: (MOVWconst [0]) 6457 for { 6458 if v_0.Op != OpMIPSMULTU { 6459 break 6460 } 6461 v_0_0 := v_0.Args[0] 6462 v_0_1 := v_0.Args[1] 6463 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6464 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != 1 { 6465 continue 6466 } 6467 v.reset(OpMIPSMOVWconst) 6468 v.AuxInt = int32ToAuxInt(0) 6469 return true 6470 } 6471 break 6472 } 6473 // match: (Select0 (MULTU (MOVWconst [-1]) x )) 6474 // result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x) 6475 for { 6476 if v_0.Op != OpMIPSMULTU { 6477 break 6478 } 6479 _ = v_0.Args[1] 6480 v_0_0 := v_0.Args[0] 6481 v_0_1 := v_0.Args[1] 6482 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6483 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != -1 { 6484 continue 6485 } 6486 x := v_0_1 6487 v.reset(OpMIPSCMOVZ) 6488 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type) 6489 
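// High word of 0xffffffff * x: it is x-1 for any nonzero x and 0 for x == 0,
// which is what the CMOVZ of (ADDconst [-1] x) against x computes.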
v0.AuxInt = int32ToAuxInt(-1) 6490 v0.AddArg(x) 6491 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 6492 v1.AuxInt = int32ToAuxInt(0) 6493 v.AddArg3(v0, v1, x) 6494 return true 6495 } 6496 break 6497 } 6498 // match: (Select0 (MULTU (MOVWconst [c]) x )) 6499 // cond: isPowerOfTwo64(int64(uint32(c))) 6500 // result: (SRLconst [int32(32-log2uint32(int64(c)))] x) 6501 for { 6502 if v_0.Op != OpMIPSMULTU { 6503 break 6504 } 6505 _ = v_0.Args[1] 6506 v_0_0 := v_0.Args[0] 6507 v_0_1 := v_0.Args[1] 6508 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6509 if v_0_0.Op != OpMIPSMOVWconst { 6510 continue 6511 } 6512 c := auxIntToInt32(v_0_0.AuxInt) 6513 x := v_0_1 6514 if !(isPowerOfTwo64(int64(uint32(c)))) { 6515 continue 6516 } 6517 v.reset(OpMIPSSRLconst) 6518 v.AuxInt = int32ToAuxInt(int32(32 - log2uint32(int64(c)))) 6519 v.AddArg(x) 6520 return true 6521 } 6522 break 6523 } 6524 // match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d]))) 6525 // result: (MOVWconst [int32((int64(uint32(c))*int64(uint32(d)))>>32)]) 6526 for { 6527 if v_0.Op != OpMIPSMULTU { 6528 break 6529 } 6530 _ = v_0.Args[1] 6531 v_0_0 := v_0.Args[0] 6532 v_0_1 := v_0.Args[1] 6533 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6534 if v_0_0.Op != OpMIPSMOVWconst { 6535 continue 6536 } 6537 c := auxIntToInt32(v_0_0.AuxInt) 6538 if v_0_1.Op != OpMIPSMOVWconst { 6539 continue 6540 } 6541 d := auxIntToInt32(v_0_1.AuxInt) 6542 v.reset(OpMIPSMOVWconst) 6543 v.AuxInt = int32ToAuxInt(int32((int64(uint32(c)) * int64(uint32(d))) >> 32)) 6544 return true 6545 } 6546 break 6547 } 6548 // match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d]))) 6549 // cond: d != 0 6550 // result: (MOVWconst [c%d]) 6551 for { 6552 if v_0.Op != OpMIPSDIV { 6553 break 6554 } 6555 _ = v_0.Args[1] 6556 v_0_0 := v_0.Args[0] 6557 if v_0_0.Op != OpMIPSMOVWconst { 6558 break 6559 } 6560 c := auxIntToInt32(v_0_0.AuxInt) 6561 v_0_1 := v_0.Args[1] 6562 if v_0_1.Op != OpMIPSMOVWconst { 6563 break 6564 } 6565 d := auxIntToInt32(v_0_1.AuxInt) 6566 if !(d != 0) { 6567 break 6568 } 6569 v.reset(OpMIPSMOVWconst) 6570 v.AuxInt = int32ToAuxInt(c % d) 6571 return true 6572 } 6573 // match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d]))) 6574 // cond: d != 0 6575 // result: (MOVWconst [int32(uint32(c)%uint32(d))]) 6576 for { 6577 if v_0.Op != OpMIPSDIVU { 6578 break 6579 } 6580 _ = v_0.Args[1] 6581 v_0_0 := v_0.Args[0] 6582 if v_0_0.Op != OpMIPSMOVWconst { 6583 break 6584 } 6585 c := auxIntToInt32(v_0_0.AuxInt) 6586 v_0_1 := v_0.Args[1] 6587 if v_0_1.Op != OpMIPSMOVWconst { 6588 break 6589 } 6590 d := auxIntToInt32(v_0_1.AuxInt) 6591 if !(d != 0) { 6592 break 6593 } 6594 v.reset(OpMIPSMOVWconst) 6595 v.AuxInt = int32ToAuxInt(int32(uint32(c) % uint32(d))) 6596 return true 6597 } 6598 return false 6599 } 6600 func rewriteValueMIPS_OpSelect1(v *Value) bool { 6601 v_0 := v.Args[0] 6602 b := v.Block 6603 typ := &b.Func.Config.Types 6604 // match: (Select1 (Add32carry <t> x y)) 6605 // result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y)) 6606 for { 6607 if v_0.Op != OpAdd32carry { 6608 break 6609 } 6610 t := v_0.Type 6611 y := v_0.Args[1] 6612 x := v_0.Args[0] 6613 v.reset(OpMIPSSGTU) 6614 v.Type = typ.Bool 6615 v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0)) 6616 v0.AddArg2(x, y) 6617 v.AddArg2(x, v0) 6618 return true 6619 } 6620 // match: (Select1 (Sub32carry <t> x y)) 6621 // result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x) 6622 for { 6623 if v_0.Op != OpSub32carry { 6624 break 6625 } 6626 t := v_0.Type 6627 y := v_0.Args[1] 
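// Select1 of Sub32carry is the borrow bit: the 32-bit difference exceeds x
// (unsigned) exactly when y > x, so SGTU (SUB x y) x yields 1 on borrow.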
6628 x := v_0.Args[0] 6629 v.reset(OpMIPSSGTU) 6630 v.Type = typ.Bool 6631 v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0)) 6632 v0.AddArg2(x, y) 6633 v.AddArg2(v0, x) 6634 return true 6635 } 6636 // match: (Select1 (MULTU (MOVWconst [0]) _ )) 6637 // result: (MOVWconst [0]) 6638 for { 6639 if v_0.Op != OpMIPSMULTU { 6640 break 6641 } 6642 v_0_0 := v_0.Args[0] 6643 v_0_1 := v_0.Args[1] 6644 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6645 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != 0 { 6646 continue 6647 } 6648 v.reset(OpMIPSMOVWconst) 6649 v.AuxInt = int32ToAuxInt(0) 6650 return true 6651 } 6652 break 6653 } 6654 // match: (Select1 (MULTU (MOVWconst [1]) x )) 6655 // result: x 6656 for { 6657 if v_0.Op != OpMIPSMULTU { 6658 break 6659 } 6660 _ = v_0.Args[1] 6661 v_0_0 := v_0.Args[0] 6662 v_0_1 := v_0.Args[1] 6663 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6664 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != 1 { 6665 continue 6666 } 6667 x := v_0_1 6668 v.copyOf(x) 6669 return true 6670 } 6671 break 6672 } 6673 // match: (Select1 (MULTU (MOVWconst [-1]) x )) 6674 // result: (NEG <x.Type> x) 6675 for { 6676 if v_0.Op != OpMIPSMULTU { 6677 break 6678 } 6679 _ = v_0.Args[1] 6680 v_0_0 := v_0.Args[0] 6681 v_0_1 := v_0.Args[1] 6682 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6683 if v_0_0.Op != OpMIPSMOVWconst || auxIntToInt32(v_0_0.AuxInt) != -1 { 6684 continue 6685 } 6686 x := v_0_1 6687 v.reset(OpMIPSNEG) 6688 v.Type = x.Type 6689 v.AddArg(x) 6690 return true 6691 } 6692 break 6693 } 6694 // match: (Select1 (MULTU (MOVWconst [c]) x )) 6695 // cond: isPowerOfTwo64(int64(uint32(c))) 6696 // result: (SLLconst [int32(log2uint32(int64(c)))] x) 6697 for { 6698 if v_0.Op != OpMIPSMULTU { 6699 break 6700 } 6701 _ = v_0.Args[1] 6702 v_0_0 := v_0.Args[0] 6703 v_0_1 := v_0.Args[1] 6704 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6705 if v_0_0.Op != OpMIPSMOVWconst { 6706 continue 6707 } 6708 c := auxIntToInt32(v_0_0.AuxInt) 6709 x := v_0_1 6710 if !(isPowerOfTwo64(int64(uint32(c)))) { 6711 continue 6712 } 6713 v.reset(OpMIPSSLLconst) 6714 v.AuxInt = int32ToAuxInt(int32(log2uint32(int64(c)))) 6715 v.AddArg(x) 6716 return true 6717 } 6718 break 6719 } 6720 // match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d]))) 6721 // result: (MOVWconst [int32(uint32(c)*uint32(d))]) 6722 for { 6723 if v_0.Op != OpMIPSMULTU { 6724 break 6725 } 6726 _ = v_0.Args[1] 6727 v_0_0 := v_0.Args[0] 6728 v_0_1 := v_0.Args[1] 6729 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 6730 if v_0_0.Op != OpMIPSMOVWconst { 6731 continue 6732 } 6733 c := auxIntToInt32(v_0_0.AuxInt) 6734 if v_0_1.Op != OpMIPSMOVWconst { 6735 continue 6736 } 6737 d := auxIntToInt32(v_0_1.AuxInt) 6738 v.reset(OpMIPSMOVWconst) 6739 v.AuxInt = int32ToAuxInt(int32(uint32(c) * uint32(d))) 6740 return true 6741 } 6742 break 6743 } 6744 // match: (Select1 (DIV (MOVWconst [c]) (MOVWconst [d]))) 6745 // cond: d != 0 6746 // result: (MOVWconst [c/d]) 6747 for { 6748 if v_0.Op != OpMIPSDIV { 6749 break 6750 } 6751 _ = v_0.Args[1] 6752 v_0_0 := v_0.Args[0] 6753 if v_0_0.Op != OpMIPSMOVWconst { 6754 break 6755 } 6756 c := auxIntToInt32(v_0_0.AuxInt) 6757 v_0_1 := v_0.Args[1] 6758 if v_0_1.Op != OpMIPSMOVWconst { 6759 break 6760 } 6761 d := auxIntToInt32(v_0_1.AuxInt) 6762 if !(d != 0) { 6763 break 6764 } 6765 v.reset(OpMIPSMOVWconst) 6766 v.AuxInt = int32ToAuxInt(c / d) 6767 return true 6768 } 6769 // match: (Select1 (DIVU 
	// match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d])))
	// cond: d != 0
	// result: (MOVWconst [int32(uint32(c)/uint32(d))])
	for {
		if v_0.Op != OpMIPSDIVU {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpMIPSMOVWconst {
			break
		}
		c := auxIntToInt32(v_0_0.AuxInt)
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpMIPSMOVWconst {
			break
		}
		d := auxIntToInt32(v_0_1.AuxInt)
		if !(d != 0) {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int32ToAuxInt(int32(uint32(c) / uint32(d)))
		return true
	}
	return false
}
func rewriteValueMIPS_OpSignmask(v *Value) bool {
	v_0 := v.Args[0]
	// match: (Signmask x)
	// result: (SRAconst x [31])
	for {
		x := v_0
		v.reset(OpMIPSSRAconst)
		v.AuxInt = int32ToAuxInt(31)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAconst (NEG <t> x) [31])
	for {
		t := v.Type
		x := v_0
		v.reset(OpMIPSSRAconst)
		v.AuxInt = int32ToAuxInt(31)
		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (MOVFstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpMIPSMOVFstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpMIPSMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpSub32withcarry(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Sub32withcarry <t> x y c)
	// result: (SUB (SUB <t> x y) c)
	for {
		t := v.Type
		x := v_0
		y := v_1
		c := v_2
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
}
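
// The two sketches below are illustrative only (they are not produced by
// 'go generate' from _gen/MIPS.rules); they model, in plain Go, the 32-bit
// arithmetic that the Signmask and Sub32withcarry lowerings above rely on.
// The function names are hypothetical.

// signmaskModel mirrors (SRAconst x [31]): arithmetically shifting the sign
// bit across the word yields 0 for non-negative x and -1 (all ones) for
// negative x.
func signmaskModel(x int32) int32 {
	return x >> 31
}

// sub32WithBorrowModel mirrors (SUB (SUB <t> x y) c): subtract y from x,
// then subtract the low-word borrow c (0 or 1).
func sub32WithBorrowModel(x, y, c uint32) uint32 {
	return (x - y) - c
}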
func rewriteValueMIPS_OpZero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Zero [0] _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_1
		v.copyOf(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// result: (MOVBstore ptr (MOVWconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPSMOVBstore)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore ptr (MOVWconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [2] ptr mem)
	// result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore ptr (MOVWconst [0]) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	// match: (Zero [4] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
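	// Illustrative note (not generated): the rules in this function are tried
	// in order from most to least aligned. For a 4-byte type with only 2-byte
	// alignment, the rule above emits two half-word stores (offsets 2 and 0);
	// when not even that alignment is known, the next rule falls back to four
	// byte stores.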
	// match: (Zero [4] ptr mem)
	// result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(1)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [3] ptr mem)
	// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		ptr := v_0
		mem := v_1
		v.reset(OpMIPSMOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [6] {t} ptr mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [8] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(0)
		v1.AddArg3(ptr, v0, mem)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [12] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(0)
		v2.AddArg3(ptr, v0, mem)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
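	// Illustrative note (not generated): the multi-store expansions above are
	// chained through memory. The innermost store (offset 0) takes the incoming
	// mem, and each outer store uses the next inner store as its memory
	// argument, so all of the zeroing stores form a single memory chain ending
	// at the original mem.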
	// match: (Zero [16] {t} ptr mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = int32ToAuxInt(12)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(0)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v2.AuxInt = int32ToAuxInt(4)
		v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(0)
		v3.AddArg3(ptr, v0, mem)
		v2.AddArg3(ptr, v0, v3)
		v1.AddArg3(ptr, v0, v2)
		v.AddArg3(ptr, v0, v1)
		return true
	}
	// match: (Zero [s] {t} ptr mem)
	// cond: (s > 16 || t.Alignment()%4 != 0)
	// result: (LoweredZero [int32(t.Alignment())] ptr (ADDconst <ptr.Type> ptr [int32(s-moveSize(t.Alignment(), config))]) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		ptr := v_0
		mem := v_1
		if !(s > 16 || t.Alignment()%4 != 0) {
			break
		}
		v.reset(OpMIPSLoweredZero)
		v.AuxInt = int32ToAuxInt(int32(t.Alignment()))
		v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type)
		v0.AuxInt = int32ToAuxInt(int32(s - moveSize(t.Alignment(), config)))
		v0.AddArg(ptr)
		v.AddArg3(ptr, v0, mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpZeromask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Zeromask x)
	// result: (NEG (SGTU x (MOVWconst [0])))
	for {
		x := v_0
		v.reset(OpMIPSNEG)
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = int32ToAuxInt(0)
		v0.AddArg2(x, v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteBlockMIPS(b *Block) bool {
	switch b.Kind {
	case BlockMIPSEQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPSFPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPSFPF, cmp)
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPSFPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPSFPT, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGT {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTU {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
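		// Illustrative note (not generated): the (XORconst [1] cmp) patterns in
		// this case recognize a negated boolean. The comparison ops produce 0 or
		// 1, so XOR with 1 flips the result; branching EQ on the flipped value
		// is the same as branching NE on cmp itself, which is what the rewrites
		// emit.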
		// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTconst {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTUconst {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTzero {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
		// match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no)
		// result: (NE cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTUzero {
				break
			}
			b.resetWithControl(BlockMIPSNE, cmp)
			return true
		}
		// match: (EQ (SGTUconst [1] x) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpMIPSSGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSNE, x)
			return true
		}
		// match: (EQ (SGTUzero x) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPSSGTUzero {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSEQ, x)
			return true
		}
		// match: (EQ (SGTconst [0] x) yes no)
		// result: (GEZ x yes no)
		for b.Controls[0].Op == OpMIPSSGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSGEZ, x)
			return true
		}
		// match: (EQ (SGTzero x) yes no)
		// result: (LEZ x yes no)
		for b.Controls[0].Op == OpMIPSSGTzero {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSLEZ, x)
			return true
		}
		// match: (EQ (MOVWconst [0]) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (EQ (MOVWconst [c]) yes no)
		// cond: c != 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPSGEZ:
		// match: (GEZ (MOVWconst [c]) yes no)
		// cond: c >= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GEZ (MOVWconst [c]) yes no)
		// cond: c < 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
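	// Illustrative note (not generated): when the control value is a known
	// constant, the branch is decided at compile time. For example, with a
	// control of (MOVWconst [5]) a GEZ block always takes its "yes" successor,
	// so it is rewritten to a plain First block; the "First no yes" results
	// call swapSuccessors first, so the successor that will actually run
	// becomes the first one.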
	case BlockMIPSGTZ:
		// match: (GTZ (MOVWconst [c]) yes no)
		// cond: c > 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (GTZ (MOVWconst [c]) yes no)
		// cond: c <= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockIf:
		// match: (If cond yes no)
		// result: (NE cond yes no)
		for {
			cond := b.Controls[0]
			b.resetWithControl(BlockMIPSNE, cond)
			return true
		}
	case BlockMIPSLEZ:
		// match: (LEZ (MOVWconst [c]) yes no)
		// cond: c <= 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c <= 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LEZ (MOVWconst [c]) yes no)
		// cond: c > 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c > 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPSLTZ:
		// match: (LTZ (MOVWconst [c]) yes no)
		// cond: c < 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c < 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (LTZ (MOVWconst [c]) yes no)
		// cond: c >= 0
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c >= 0) {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
	case BlockMIPSNE:
		// match: (NE (FPFlagTrue cmp) yes no)
		// result: (FPT cmp yes no)
		for b.Controls[0].Op == OpMIPSFPFlagTrue {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPSFPT, cmp)
			return true
		}
		// match: (NE (FPFlagFalse cmp) yes no)
		// result: (FPF cmp yes no)
		for b.Controls[0].Op == OpMIPSFPFlagFalse {
			v_0 := b.Controls[0]
			cmp := v_0.Args[0]
			b.resetWithControl(BlockMIPSFPF, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGT {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTU {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTconst {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTUconst {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTzero _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTzero {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no)
		// result: (EQ cmp yes no)
		for b.Controls[0].Op == OpMIPSXORconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			cmp := v_0.Args[0]
			if cmp.Op != OpMIPSSGTUzero {
				break
			}
			b.resetWithControl(BlockMIPSEQ, cmp)
			return true
		}
		// match: (NE (SGTUconst [1] x) yes no)
		// result: (EQ x yes no)
		for b.Controls[0].Op == OpMIPSSGTUconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 1 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSEQ, x)
			return true
		}
		// match: (NE (SGTUzero x) yes no)
		// result: (NE x yes no)
		for b.Controls[0].Op == OpMIPSSGTUzero {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSNE, x)
			return true
		}
		// match: (NE (SGTconst [0] x) yes no)
		// result: (LTZ x yes no)
		for b.Controls[0].Op == OpMIPSSGTconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 0 {
				break
			}
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSLTZ, x)
			return true
		}
		// match: (NE (SGTzero x) yes no)
		// result: (GTZ x yes no)
		for b.Controls[0].Op == OpMIPSSGTzero {
			v_0 := b.Controls[0]
			x := v_0.Args[0]
			b.resetWithControl(BlockMIPSGTZ, x)
			return true
		}
		// match: (NE (MOVWconst [0]) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			if auxIntToInt32(v_0.AuxInt) != 0 {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (NE (MOVWconst [c]) yes no)
		// cond: c != 0
		// result: (First yes no)
		for b.Controls[0].Op == OpMIPSMOVWconst {
			v_0 := b.Controls[0]
			c := auxIntToInt32(v_0.AuxInt)
			if !(c != 0) {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
	}
	return false
}
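
// The sketch below is illustrative only (it is not produced by 'go generate'
// from _gen/MIPS.rules); it models, as plain 32-bit arithmetic, the Zeromask
// lowering above: (NEG (SGTU x (MOVWconst [0]))) yields all ones when x is
// nonzero and zero when x is zero. The function name is hypothetical.
func zeromaskModel(x uint32) uint32 {
	var gtu uint32
	if x > 0 { // SGTU x, 0: unsigned "greater than zero" produces 0 or 1
		gtu = 1
	}
	return -gtu // NEG: 0 stays 0, 1 becomes 0xFFFFFFFF
}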