// Code generated from gen/MIPS.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go

package ssa

import "fmt"
import "math"
import "cmd/internal/obj"
import "cmd/internal/objabi"
import "cmd/compile/internal/types"

var _ = fmt.Println   // in case not otherwise used
var _ = math.MinInt8  // in case not otherwise used
var _ = obj.ANOP      // in case not otherwise used
var _ = objabi.GOROOT // in case not otherwise used
var _ = types.TypeMem // in case not otherwise used

// rewriteValueMIPS dispatches on v.Op to the generated per-op rewrite helper.
// It reports whether v was rewritten by one of the MIPS lowering rules.
func rewriteValueMIPS(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS_OpAdd32F_0(v)
	case OpAdd32withcarry:
		return rewriteValueMIPS_OpAdd32withcarry_0(v)
	case OpAdd64F:
		return rewriteValueMIPS_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS_OpAnd32_0(v)
	case OpAnd8:
		return rewriteValueMIPS_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS_OpAndB_0(v)
	case OpAtomicAdd32:
		return rewriteValueMIPS_OpAtomicAdd32_0(v)
	case OpAtomicAnd8:
		return rewriteValueMIPS_OpAtomicAnd8_0(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v)
	case OpAtomicExchange32:
		return rewriteValueMIPS_OpAtomicExchange32_0(v)
	case OpAtomicLoad32:
		return rewriteValueMIPS_OpAtomicLoad32_0(v)
	case OpAtomicLoadPtr:
		return rewriteValueMIPS_OpAtomicLoadPtr_0(v)
	case OpAtomicOr8:
		return rewriteValueMIPS_OpAtomicOr8_0(v)
	case OpAtomicStore32:
		return rewriteValueMIPS_OpAtomicStore32_0(v)
	case OpAtomicStorePtrNoWB:
		return rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v)
	case OpAvg32u:
		return rewriteValueMIPS_OpAvg32u_0(v)
	case OpBitLen32:
		return rewriteValueMIPS_OpBitLen32_0(v)
	case OpClosureCall:
		return rewriteValueMIPS_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS_OpCom32_0(v)
	case OpCom8:
		return rewriteValueMIPS_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS_OpConst32F_0(v)
	case OpConst64F:
		return rewriteValueMIPS_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS_OpConstNil_0(v)
	case OpCtz32:
		return rewriteValueMIPS_OpCtz32_0(v)
	case OpCtz32NonZero:
		return rewriteValueMIPS_OpCtz32NonZero_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS_OpCvt32Fto32_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS_OpCvt64Fto32F_0(v)
	case OpDiv16:
		return rewriteValueMIPS_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS_OpDiv32u_0(v)
	case OpDiv64F:
		return rewriteValueMIPS_OpDiv64F_0(v)
	case OpDiv8:
		return rewriteValueMIPS_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS_OpEq32F_0(v)
	case OpEq64F:
		return rewriteValueMIPS_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS_OpGeq32U_0(v)
	case OpGeq64F:
		return rewriteValueMIPS_OpGeq64F_0(v)
	case OpGeq8:
		return rewriteValueMIPS_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS_OpGeq8U_0(v)
	case OpGetCallerPC:
		return rewriteValueMIPS_OpGetCallerPC_0(v)
	case OpGetCallerSP:
		return rewriteValueMIPS_OpGetCallerSP_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS_OpGreater32U_0(v)
	case OpGreater64F:
		return rewriteValueMIPS_OpGreater64F_0(v)
	case OpGreater8:
		return rewriteValueMIPS_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS_OpHmul32u_0(v)
	case OpInterCall:
		return rewriteValueMIPS_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS_OpLeq32U_0(v)
	case OpLeq64F:
		return rewriteValueMIPS_OpLeq64F_0(v)
	case OpLeq8:
		return rewriteValueMIPS_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS_OpLess32U_0(v)
	case OpLess64F:
		return rewriteValueMIPS_OpLess64F_0(v)
	case OpLess8:
		return rewriteValueMIPS_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS_OpLoad_0(v)
	case OpLocalAddr:
		return rewriteValueMIPS_OpLocalAddr_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS_OpLsh32x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS_OpLsh8x8_0(v)
	case OpMIPSADD:
		return rewriteValueMIPS_OpMIPSADD_0(v)
	case OpMIPSADDconst:
		return rewriteValueMIPS_OpMIPSADDconst_0(v)
	case OpMIPSAND:
		return rewriteValueMIPS_OpMIPSAND_0(v)
	case OpMIPSANDconst:
		return rewriteValueMIPS_OpMIPSANDconst_0(v)
	case OpMIPSCMOVZ:
		return rewriteValueMIPS_OpMIPSCMOVZ_0(v)
	case OpMIPSCMOVZzero:
		return rewriteValueMIPS_OpMIPSCMOVZzero_0(v)
	case OpMIPSLoweredAtomicAdd:
		return rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v)
	case OpMIPSLoweredAtomicStore:
		return rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v)
	case OpMIPSMOVBUload:
		return rewriteValueMIPS_OpMIPSMOVBUload_0(v)
	case OpMIPSMOVBUreg:
		return rewriteValueMIPS_OpMIPSMOVBUreg_0(v)
	case OpMIPSMOVBload:
		return rewriteValueMIPS_OpMIPSMOVBload_0(v)
	case OpMIPSMOVBreg:
		return rewriteValueMIPS_OpMIPSMOVBreg_0(v)
	case OpMIPSMOVBstore:
		return rewriteValueMIPS_OpMIPSMOVBstore_0(v)
	case OpMIPSMOVBstorezero:
		return rewriteValueMIPS_OpMIPSMOVBstorezero_0(v)
	case OpMIPSMOVDload:
		return rewriteValueMIPS_OpMIPSMOVDload_0(v)
	case OpMIPSMOVDstore:
		return rewriteValueMIPS_OpMIPSMOVDstore_0(v)
	case OpMIPSMOVFload:
		return rewriteValueMIPS_OpMIPSMOVFload_0(v)
	case OpMIPSMOVFstore:
		return rewriteValueMIPS_OpMIPSMOVFstore_0(v)
	case OpMIPSMOVHUload:
		return rewriteValueMIPS_OpMIPSMOVHUload_0(v)
	case OpMIPSMOVHUreg:
		return rewriteValueMIPS_OpMIPSMOVHUreg_0(v)
	case OpMIPSMOVHload:
		return rewriteValueMIPS_OpMIPSMOVHload_0(v)
	case OpMIPSMOVHreg:
		return rewriteValueMIPS_OpMIPSMOVHreg_0(v)
	case OpMIPSMOVHstore:
		return rewriteValueMIPS_OpMIPSMOVHstore_0(v)
	case OpMIPSMOVHstorezero:
		return rewriteValueMIPS_OpMIPSMOVHstorezero_0(v)
	case OpMIPSMOVWload:
		return rewriteValueMIPS_OpMIPSMOVWload_0(v)
	case OpMIPSMOVWreg:
		return rewriteValueMIPS_OpMIPSMOVWreg_0(v)
	case OpMIPSMOVWstore:
		return rewriteValueMIPS_OpMIPSMOVWstore_0(v)
	case OpMIPSMOVWstorezero:
		return rewriteValueMIPS_OpMIPSMOVWstorezero_0(v)
	case OpMIPSMUL:
		return rewriteValueMIPS_OpMIPSMUL_0(v)
	case OpMIPSNEG:
		return rewriteValueMIPS_OpMIPSNEG_0(v)
	case OpMIPSNOR:
		return rewriteValueMIPS_OpMIPSNOR_0(v)
	case OpMIPSNORconst:
		return rewriteValueMIPS_OpMIPSNORconst_0(v)
	case OpMIPSOR:
		return rewriteValueMIPS_OpMIPSOR_0(v)
	case OpMIPSORconst:
		return rewriteValueMIPS_OpMIPSORconst_0(v)
	case OpMIPSSGT:
		return rewriteValueMIPS_OpMIPSSGT_0(v)
	case OpMIPSSGTU:
		return rewriteValueMIPS_OpMIPSSGTU_0(v)
	case OpMIPSSGTUconst:
		return rewriteValueMIPS_OpMIPSSGTUconst_0(v)
	case OpMIPSSGTUzero:
		return rewriteValueMIPS_OpMIPSSGTUzero_0(v)
	case OpMIPSSGTconst:
		// More than ten rules for this op; the generator splits them into _0 and _10 helpers.
		return rewriteValueMIPS_OpMIPSSGTconst_0(v) || rewriteValueMIPS_OpMIPSSGTconst_10(v)
	case OpMIPSSGTzero:
		return rewriteValueMIPS_OpMIPSSGTzero_0(v)
	case OpMIPSSLL:
		return rewriteValueMIPS_OpMIPSSLL_0(v)
	case OpMIPSSLLconst:
		return rewriteValueMIPS_OpMIPSSLLconst_0(v)
	case OpMIPSSRA:
		return rewriteValueMIPS_OpMIPSSRA_0(v)
	case OpMIPSSRAconst:
		return rewriteValueMIPS_OpMIPSSRAconst_0(v)
	case OpMIPSSRL:
		return rewriteValueMIPS_OpMIPSSRL_0(v)
	case OpMIPSSRLconst:
		return rewriteValueMIPS_OpMIPSSRLconst_0(v)
	case OpMIPSSUB:
		return rewriteValueMIPS_OpMIPSSUB_0(v)
	case OpMIPSSUBconst:
		return rewriteValueMIPS_OpMIPSSUBconst_0(v)
	case OpMIPSXOR:
		return rewriteValueMIPS_OpMIPSXOR_0(v)
	case OpMIPSXORconst:
		return rewriteValueMIPS_OpMIPSXORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS_OpMod32u_0(v)
	case OpMod8:
		return rewriteValueMIPS_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS_OpMove_0(v) || rewriteValueMIPS_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS_OpMul32F_0(v)
	case OpMul32uhilo:
		return rewriteValueMIPS_OpMul32uhilo_0(v)
	case OpMul64F:
		return rewriteValueMIPS_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS_OpNeg32F_0(v)
	case OpNeg64F:
		return rewriteValueMIPS_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS_OpNeq32F_0(v)
	case OpNeq64F:
		return rewriteValueMIPS_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS_OpOr32_0(v)
	case OpOr8:
		return rewriteValueMIPS_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS_OpRsh32x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS_OpSelect0_0(v) || rewriteValueMIPS_OpSelect0_10(v)
	case OpSelect1:
		return rewriteValueMIPS_OpSelect1_0(v) || rewriteValueMIPS_OpSelect1_10(v)
	case OpSignExt16to32:
		return rewriteValueMIPS_OpSignExt16to32_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS_OpSignExt8to32_0(v)
	case OpSignmask:
		return rewriteValueMIPS_OpSignmask_0(v)
	case OpSlicemask:
		return rewriteValueMIPS_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValueMIPS_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValueMIPS_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS_OpSub32F_0(v)
	case OpSub32withcarry:
		return rewriteValueMIPS_OpSub32withcarry_0(v)
	case OpSub64F:
		return rewriteValueMIPS_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS_OpTrunc32to8_0(v)
	case OpWB:
		return rewriteValueMIPS_OpWB_0(v)
	case OpXor16:
		return rewriteValueMIPS_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS_OpXor32_0(v)
	case OpXor8:
		return rewriteValueMIPS_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS_OpZero_0(v) || rewriteValueMIPS_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS_OpZeroExt16to32_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS_OpZeroExt8to32_0(v)
	case OpZeromask:
		return rewriteValueMIPS_OpZeromask_0(v)
	}
	// No rule matched this op.
	return false
}
func rewriteValueMIPS_OpAdd16_0(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAdd32_0(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAdd32F_0(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (ADDF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADDF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAdd32withcarry_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Add32withcarry <t> x y c)
	// cond:
	// result: (ADD c (ADD <t> x y))
	for {
		t := v.Type
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		c := v.Args[2]
		v.reset(OpMIPSADD)
		v.AddArg(c)
		v0 := b.NewValue0(v.Pos, OpMIPSADD, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS_OpAdd64F_0(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (ADDD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAdd8_0(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAddPtr_0(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAddr_0(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVWaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpMIPSMOVWaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}
func rewriteValueMIPS_OpAnd16_0(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAnd32_0(v *Value) bool {
	// match: (And32 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAnd8_0(v *Value) bool {
	// match: (And8 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAndB_0(v *Value) bool {
	// match: (AndB x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSAND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpAtomicAdd32_0(v *Value) bool {
	// match: (AtomicAdd32 ptr val mem)
	// cond:
	// result: (LoweredAtomicAdd ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicAdd)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
// Two rules below: the byte lane within the aligned word differs between
// little- and big-endian targets (the big-endian rule adds XORconst [3] to
// the byte index), hence the config.BigEndian conditions.
func rewriteValueMIPS_OpAtomicAnd8_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (AtomicAnd8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))))) mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v5.AuxInt = 3
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v6.AuxInt = 3
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v3.AddArg(v5)
		v2.AddArg(v3)
		v7 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
		v7.AuxInt = 0
		v8 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v9 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v9.AuxInt = 0xff
		v8.AddArg(v9)
		v10 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v10.AuxInt = 3
		v11 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v11.AuxInt = 3
		v11.AddArg(ptr)
		v10.AddArg(v11)
		v8.AddArg(v10)
		v7.AddArg(v8)
		v2.AddArg(v7)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	// match: (AtomicAnd8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicAnd (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (OR <typ.UInt32> (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) (NORconst [0] <typ.UInt32> (SLL <typ.UInt32> (MOVWconst [0xff]) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))))) mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicAnd)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSOR, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v5.AuxInt = 3
		v6 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v6.AuxInt = 3
		v7 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
		v7.AuxInt = 3
		v7.AddArg(ptr)
		v6.AddArg(v7)
		v5.AddArg(v6)
		v3.AddArg(v5)
		v2.AddArg(v3)
		v8 := b.NewValue0(v.Pos, OpMIPSNORconst, typ.UInt32)
		v8.AuxInt = 0
		v9 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v10 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v10.AuxInt = 0xff
		v9.AddArg(v10)
		v11 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v11.AuxInt = 3
		v12 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v12.AuxInt = 3
		v13 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
		v13.AuxInt = 3
		v13.AddArg(ptr)
		v12.AddArg(v13)
		v11.AddArg(v12)
		v9.AddArg(v11)
		v8.AddArg(v9)
		v2.AddArg(v8)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpAtomicCompareAndSwap32_0(v *Value) bool {
	// match: (AtomicCompareAndSwap32 ptr old new_ mem)
	// cond:
	// result: (LoweredAtomicCas ptr old new_ mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		old := v.Args[1]
		new_ := v.Args[2]
		mem := v.Args[3]
		v.reset(OpMIPSLoweredAtomicCas)
		v.AddArg(ptr)
		v.AddArg(old)
		v.AddArg(new_)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpAtomicExchange32_0(v *Value) bool {
	// match: (AtomicExchange32 ptr val mem)
	// cond:
	// result: (LoweredAtomicExchange ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicExchange)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpAtomicLoad32_0(v *Value) bool {
	// match: (AtomicLoad32 ptr mem)
	// cond:
	// result: (LoweredAtomicLoad ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPSLoweredAtomicLoad)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpAtomicLoadPtr_0(v *Value) bool {
	// match: (AtomicLoadPtr ptr mem)
	// cond:
	// result: (LoweredAtomicLoad ptr mem)
	for {
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPSLoweredAtomicLoad)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
}
// Like AtomicAnd8 above, the two rules differ only in the big-endian byte-lane
// adjustment (XORconst [3] on the byte index).
func rewriteValueMIPS_OpAtomicOr8_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	typ := &b.Func.Config.Types
	_ = typ
	// match: (AtomicOr8 ptr val mem)
	// cond: !config.BigEndian
	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] ptr))) mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(!config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicOr)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v4.AuxInt = 3
		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v5.AuxInt = 3
		v5.AddArg(ptr)
		v4.AddArg(v5)
		v2.AddArg(v4)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	// match: (AtomicOr8 ptr val mem)
	// cond: config.BigEndian
	// result: (LoweredAtomicOr (AND <typ.UInt32Ptr> (MOVWconst [^3]) ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLconst <typ.UInt32> [3] (ANDconst <typ.UInt32> [3] (XORconst <typ.UInt32> [3] ptr)))) mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(config.BigEndian) {
			break
		}
		v.reset(OpMIPSLoweredAtomicOr)
		v0 := b.NewValue0(v.Pos, OpMIPSAND, typ.UInt32Ptr)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = ^3
		v0.AddArg(v1)
		v0.AddArg(ptr)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpMIPSSLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v3.AddArg(val)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32)
		v4.AuxInt = 3
		v5 := b.NewValue0(v.Pos, OpMIPSANDconst, typ.UInt32)
		v5.AuxInt = 3
		v6 := b.NewValue0(v.Pos, OpMIPSXORconst, typ.UInt32)
		v6.AuxInt = 3
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v4.AddArg(v5)
		v2.AddArg(v4)
		v.AddArg(v2)
		v.AddArg(mem)
		return true
	}
	return false
}
func rewriteValueMIPS_OpAtomicStore32_0(v *Value) bool {
	// match: (AtomicStore32 ptr val mem)
	// cond:
	// result: (LoweredAtomicStore ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicStore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpAtomicStorePtrNoWB_0(v *Value) bool {
	// match: (AtomicStorePtrNoWB ptr val mem)
	// cond:
	// result: (LoweredAtomicStore ptr val mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredAtomicStore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpAvg32u_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Avg32u <t> x y)
	// cond:
	// result: (ADD (SRLconst <t> (SUB <t> x y) [1]) y)
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSADD)
		v0 := b.NewValue0(v.Pos, OpMIPSSRLconst, t)
		v0.AuxInt = 1
		v1 := b.NewValue0(v.Pos, OpMIPSSUB, t)
		v1.AddArg(x)
		v1.AddArg(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpBitLen32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (BitLen32 <t> x)
	// cond:
	// result: (SUB (MOVWconst [32]) (CLZ <t> x))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = 32
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS_OpClosureCall_0(v *Value) bool {
	// match: (ClosureCall [argwid] entry closure mem)
	// cond:
	// result: (CALLclosure [argwid] entry closure mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[2]
		entry := v.Args[0]
		closure := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSCALLclosure)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(closure)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS_OpCom16_0(v *Value) bool {
	// match: (Com16 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCom32_0(v *Value) bool {
	// match: (Com32 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCom8_0(v *Value) bool {
	// match: (Com8 x)
	// cond:
	// result: (NORconst [0] x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSNORconst)
		v.AuxInt = 0
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpConst16_0(v *Value) bool {
	// match: (Const16 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueMIPS_OpConst32_0(v *Value) bool {
	// match: (Const32 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueMIPS_OpConst32F_0(v *Value) bool {
	// match: (Const32F [val])
	// cond:
	// result: (MOVFconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVFconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueMIPS_OpConst64F_0(v *Value) bool {
	// match: (Const64F [val])
	// cond:
	// result: (MOVDconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVDconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueMIPS_OpConst8_0(v *Value) bool {
	// match: (Const8 [val])
	// cond:
	// result: (MOVWconst [val])
	for {
		val := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = val
		return true
	}
}
func rewriteValueMIPS_OpConstBool_0(v *Value) bool {
	// match: (ConstBool [b])
	// cond:
	// result: (MOVWconst [b])
	for {
		b := v.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = b
		return true
	}
}
func rewriteValueMIPS_OpConstNil_0(v *Value) bool {
	// match: (ConstNil)
	// cond:
	// result: (MOVWconst [0])
	for {
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = 0
		return true
	}
}
func rewriteValueMIPS_OpCtz32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Ctz32 <t> x)
	// cond:
	// result: (SUB (MOVWconst [32]) (CLZ <t> (SUBconst <t> [1] (AND <t> x (NEG <t> x)))))
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v0.AuxInt = 32
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPSCLZ, t)
		v2 := b.NewValue0(v.Pos, OpMIPSSUBconst, t)
		v2.AuxInt = 1
		v3 := b.NewValue0(v.Pos, OpMIPSAND, t)
		v3.AddArg(x)
		v4 := b.NewValue0(v.Pos, OpMIPSNEG, t)
		v4.AddArg(x)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS_OpCtz32NonZero_0(v *Value) bool {
	// match: (Ctz32NonZero x)
	// cond:
	// result: (Ctz32 x)
	for {
		x := v.Args[0]
		v.reset(OpCtz32)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt32Fto32_0(v *Value) bool {
	// match: (Cvt32Fto32 x)
	// cond:
	// result: (TRUNCFW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSTRUNCFW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt32Fto64F_0(v *Value) bool {
	// match: (Cvt32Fto64F x)
	// cond:
	// result: (MOVFD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVFD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt32to32F_0(v *Value) bool {
	// match: (Cvt32to32F x)
	// cond:
	// result: (MOVWF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVWF)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt32to64F_0(v *Value) bool {
	// match: (Cvt32to64F x)
	// cond:
	// result: (MOVWD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVWD)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt64Fto32_0(v *Value) bool {
	// match: (Cvt64Fto32 x)
	// cond:
	// result: (TRUNCDW x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSTRUNCDW)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpCvt64Fto32F_0(v *Value) bool {
	// match: (Cvt64Fto32F x)
	// cond:
	// result: (MOVDF x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVDF)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS_OpDiv16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16 x y)
	// cond:
	// result: (Select1 (DIV (SignExt16to32 x) (SignExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS_OpDiv16u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div16u x y)
	// cond:
	// result: (Select1 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32))
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS_OpDiv32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32 x y)
	// cond:
	// result: (Select1 (DIV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSDIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS_OpDiv32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVU x y))
	for {
1369 _ = v.Args[1] 1370 x := v.Args[0] 1371 y := v.Args[1] 1372 v.reset(OpSelect1) 1373 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 1374 v0.AddArg(x) 1375 v0.AddArg(y) 1376 v.AddArg(v0) 1377 return true 1378 } 1379 } 1380 func rewriteValueMIPS_OpDiv64F_0(v *Value) bool { 1381 // match: (Div64F x y) 1382 // cond: 1383 // result: (DIVD x y) 1384 for { 1385 _ = v.Args[1] 1386 x := v.Args[0] 1387 y := v.Args[1] 1388 v.reset(OpMIPSDIVD) 1389 v.AddArg(x) 1390 v.AddArg(y) 1391 return true 1392 } 1393 } 1394 func rewriteValueMIPS_OpDiv8_0(v *Value) bool { 1395 b := v.Block 1396 _ = b 1397 typ := &b.Func.Config.Types 1398 _ = typ 1399 // match: (Div8 x y) 1400 // cond: 1401 // result: (Select1 (DIV (SignExt8to32 x) (SignExt8to32 y))) 1402 for { 1403 _ = v.Args[1] 1404 x := v.Args[0] 1405 y := v.Args[1] 1406 v.reset(OpSelect1) 1407 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 1408 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1409 v1.AddArg(x) 1410 v0.AddArg(v1) 1411 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1412 v2.AddArg(y) 1413 v0.AddArg(v2) 1414 v.AddArg(v0) 1415 return true 1416 } 1417 } 1418 func rewriteValueMIPS_OpDiv8u_0(v *Value) bool { 1419 b := v.Block 1420 _ = b 1421 typ := &b.Func.Config.Types 1422 _ = typ 1423 // match: (Div8u x y) 1424 // cond: 1425 // result: (Select1 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y))) 1426 for { 1427 _ = v.Args[1] 1428 x := v.Args[0] 1429 y := v.Args[1] 1430 v.reset(OpSelect1) 1431 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 1432 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1433 v1.AddArg(x) 1434 v0.AddArg(v1) 1435 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1436 v2.AddArg(y) 1437 v0.AddArg(v2) 1438 v.AddArg(v0) 1439 return true 1440 } 1441 } 1442 func rewriteValueMIPS_OpEq16_0(v *Value) bool { 1443 b := v.Block 1444 _ = b 1445 typ := &b.Func.Config.Types 1446 _ = typ 1447 // match: (Eq16 x y) 1448 // cond: 
1449 // result: (SGTUconst [1] (XOR (ZeroExt16to32 x) (ZeroExt16to32 y))) 1450 for { 1451 _ = v.Args[1] 1452 x := v.Args[0] 1453 y := v.Args[1] 1454 v.reset(OpMIPSSGTUconst) 1455 v.AuxInt = 1 1456 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1457 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1458 v1.AddArg(x) 1459 v0.AddArg(v1) 1460 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1461 v2.AddArg(y) 1462 v0.AddArg(v2) 1463 v.AddArg(v0) 1464 return true 1465 } 1466 } 1467 func rewriteValueMIPS_OpEq32_0(v *Value) bool { 1468 b := v.Block 1469 _ = b 1470 typ := &b.Func.Config.Types 1471 _ = typ 1472 // match: (Eq32 x y) 1473 // cond: 1474 // result: (SGTUconst [1] (XOR x y)) 1475 for { 1476 _ = v.Args[1] 1477 x := v.Args[0] 1478 y := v.Args[1] 1479 v.reset(OpMIPSSGTUconst) 1480 v.AuxInt = 1 1481 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1482 v0.AddArg(x) 1483 v0.AddArg(y) 1484 v.AddArg(v0) 1485 return true 1486 } 1487 } 1488 func rewriteValueMIPS_OpEq32F_0(v *Value) bool { 1489 b := v.Block 1490 _ = b 1491 // match: (Eq32F x y) 1492 // cond: 1493 // result: (FPFlagTrue (CMPEQF x y)) 1494 for { 1495 _ = v.Args[1] 1496 x := v.Args[0] 1497 y := v.Args[1] 1498 v.reset(OpMIPSFPFlagTrue) 1499 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags) 1500 v0.AddArg(x) 1501 v0.AddArg(y) 1502 v.AddArg(v0) 1503 return true 1504 } 1505 } 1506 func rewriteValueMIPS_OpEq64F_0(v *Value) bool { 1507 b := v.Block 1508 _ = b 1509 // match: (Eq64F x y) 1510 // cond: 1511 // result: (FPFlagTrue (CMPEQD x y)) 1512 for { 1513 _ = v.Args[1] 1514 x := v.Args[0] 1515 y := v.Args[1] 1516 v.reset(OpMIPSFPFlagTrue) 1517 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags) 1518 v0.AddArg(x) 1519 v0.AddArg(y) 1520 v.AddArg(v0) 1521 return true 1522 } 1523 } 1524 func rewriteValueMIPS_OpEq8_0(v *Value) bool { 1525 b := v.Block 1526 _ = b 1527 typ := &b.Func.Config.Types 1528 _ = typ 1529 // match: (Eq8 x y) 1530 // cond: 1531 // result: (SGTUconst [1] (XOR (ZeroExt8to32 x) 
(ZeroExt8to32 y))) 1532 for { 1533 _ = v.Args[1] 1534 x := v.Args[0] 1535 y := v.Args[1] 1536 v.reset(OpMIPSSGTUconst) 1537 v.AuxInt = 1 1538 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1539 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1540 v1.AddArg(x) 1541 v0.AddArg(v1) 1542 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1543 v2.AddArg(y) 1544 v0.AddArg(v2) 1545 v.AddArg(v0) 1546 return true 1547 } 1548 } 1549 func rewriteValueMIPS_OpEqB_0(v *Value) bool { 1550 b := v.Block 1551 _ = b 1552 typ := &b.Func.Config.Types 1553 _ = typ 1554 // match: (EqB x y) 1555 // cond: 1556 // result: (XORconst [1] (XOR <typ.Bool> x y)) 1557 for { 1558 _ = v.Args[1] 1559 x := v.Args[0] 1560 y := v.Args[1] 1561 v.reset(OpMIPSXORconst) 1562 v.AuxInt = 1 1563 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.Bool) 1564 v0.AddArg(x) 1565 v0.AddArg(y) 1566 v.AddArg(v0) 1567 return true 1568 } 1569 } 1570 func rewriteValueMIPS_OpEqPtr_0(v *Value) bool { 1571 b := v.Block 1572 _ = b 1573 typ := &b.Func.Config.Types 1574 _ = typ 1575 // match: (EqPtr x y) 1576 // cond: 1577 // result: (SGTUconst [1] (XOR x y)) 1578 for { 1579 _ = v.Args[1] 1580 x := v.Args[0] 1581 y := v.Args[1] 1582 v.reset(OpMIPSSGTUconst) 1583 v.AuxInt = 1 1584 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 1585 v0.AddArg(x) 1586 v0.AddArg(y) 1587 v.AddArg(v0) 1588 return true 1589 } 1590 } 1591 func rewriteValueMIPS_OpGeq16_0(v *Value) bool { 1592 b := v.Block 1593 _ = b 1594 typ := &b.Func.Config.Types 1595 _ = typ 1596 // match: (Geq16 x y) 1597 // cond: 1598 // result: (XORconst [1] (SGT (SignExt16to32 y) (SignExt16to32 x))) 1599 for { 1600 _ = v.Args[1] 1601 x := v.Args[0] 1602 y := v.Args[1] 1603 v.reset(OpMIPSXORconst) 1604 v.AuxInt = 1 1605 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1606 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1607 v1.AddArg(y) 1608 v0.AddArg(v1) 1609 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1610 v2.AddArg(x) 1611 v0.AddArg(v2) 1612 v.AddArg(v0) 1613 return 
true 1614 } 1615 } 1616 func rewriteValueMIPS_OpGeq16U_0(v *Value) bool { 1617 b := v.Block 1618 _ = b 1619 typ := &b.Func.Config.Types 1620 _ = typ 1621 // match: (Geq16U x y) 1622 // cond: 1623 // result: (XORconst [1] (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x))) 1624 for { 1625 _ = v.Args[1] 1626 x := v.Args[0] 1627 y := v.Args[1] 1628 v.reset(OpMIPSXORconst) 1629 v.AuxInt = 1 1630 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1631 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1632 v1.AddArg(y) 1633 v0.AddArg(v1) 1634 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1635 v2.AddArg(x) 1636 v0.AddArg(v2) 1637 v.AddArg(v0) 1638 return true 1639 } 1640 } 1641 func rewriteValueMIPS_OpGeq32_0(v *Value) bool { 1642 b := v.Block 1643 _ = b 1644 typ := &b.Func.Config.Types 1645 _ = typ 1646 // match: (Geq32 x y) 1647 // cond: 1648 // result: (XORconst [1] (SGT y x)) 1649 for { 1650 _ = v.Args[1] 1651 x := v.Args[0] 1652 y := v.Args[1] 1653 v.reset(OpMIPSXORconst) 1654 v.AuxInt = 1 1655 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1656 v0.AddArg(y) 1657 v0.AddArg(x) 1658 v.AddArg(v0) 1659 return true 1660 } 1661 } 1662 func rewriteValueMIPS_OpGeq32F_0(v *Value) bool { 1663 b := v.Block 1664 _ = b 1665 // match: (Geq32F x y) 1666 // cond: 1667 // result: (FPFlagTrue (CMPGEF x y)) 1668 for { 1669 _ = v.Args[1] 1670 x := v.Args[0] 1671 y := v.Args[1] 1672 v.reset(OpMIPSFPFlagTrue) 1673 v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags) 1674 v0.AddArg(x) 1675 v0.AddArg(y) 1676 v.AddArg(v0) 1677 return true 1678 } 1679 } 1680 func rewriteValueMIPS_OpGeq32U_0(v *Value) bool { 1681 b := v.Block 1682 _ = b 1683 typ := &b.Func.Config.Types 1684 _ = typ 1685 // match: (Geq32U x y) 1686 // cond: 1687 // result: (XORconst [1] (SGTU y x)) 1688 for { 1689 _ = v.Args[1] 1690 x := v.Args[0] 1691 y := v.Args[1] 1692 v.reset(OpMIPSXORconst) 1693 v.AuxInt = 1 1694 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1695 v0.AddArg(y) 1696 v0.AddArg(x) 1697 v.AddArg(v0) 1698 
return true 1699 } 1700 } 1701 func rewriteValueMIPS_OpGeq64F_0(v *Value) bool { 1702 b := v.Block 1703 _ = b 1704 // match: (Geq64F x y) 1705 // cond: 1706 // result: (FPFlagTrue (CMPGED x y)) 1707 for { 1708 _ = v.Args[1] 1709 x := v.Args[0] 1710 y := v.Args[1] 1711 v.reset(OpMIPSFPFlagTrue) 1712 v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags) 1713 v0.AddArg(x) 1714 v0.AddArg(y) 1715 v.AddArg(v0) 1716 return true 1717 } 1718 } 1719 func rewriteValueMIPS_OpGeq8_0(v *Value) bool { 1720 b := v.Block 1721 _ = b 1722 typ := &b.Func.Config.Types 1723 _ = typ 1724 // match: (Geq8 x y) 1725 // cond: 1726 // result: (XORconst [1] (SGT (SignExt8to32 y) (SignExt8to32 x))) 1727 for { 1728 _ = v.Args[1] 1729 x := v.Args[0] 1730 y := v.Args[1] 1731 v.reset(OpMIPSXORconst) 1732 v.AuxInt = 1 1733 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 1734 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1735 v1.AddArg(y) 1736 v0.AddArg(v1) 1737 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1738 v2.AddArg(x) 1739 v0.AddArg(v2) 1740 v.AddArg(v0) 1741 return true 1742 } 1743 } 1744 func rewriteValueMIPS_OpGeq8U_0(v *Value) bool { 1745 b := v.Block 1746 _ = b 1747 typ := &b.Func.Config.Types 1748 _ = typ 1749 // match: (Geq8U x y) 1750 // cond: 1751 // result: (XORconst [1] (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x))) 1752 for { 1753 _ = v.Args[1] 1754 x := v.Args[0] 1755 y := v.Args[1] 1756 v.reset(OpMIPSXORconst) 1757 v.AuxInt = 1 1758 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 1759 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1760 v1.AddArg(y) 1761 v0.AddArg(v1) 1762 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1763 v2.AddArg(x) 1764 v0.AddArg(v2) 1765 v.AddArg(v0) 1766 return true 1767 } 1768 } 1769 func rewriteValueMIPS_OpGetCallerPC_0(v *Value) bool { 1770 // match: (GetCallerPC) 1771 // cond: 1772 // result: (LoweredGetCallerPC) 1773 for { 1774 v.reset(OpMIPSLoweredGetCallerPC) 1775 return true 1776 } 1777 } 1778 func 
rewriteValueMIPS_OpGetCallerSP_0(v *Value) bool { 1779 // match: (GetCallerSP) 1780 // cond: 1781 // result: (LoweredGetCallerSP) 1782 for { 1783 v.reset(OpMIPSLoweredGetCallerSP) 1784 return true 1785 } 1786 } 1787 func rewriteValueMIPS_OpGetClosurePtr_0(v *Value) bool { 1788 // match: (GetClosurePtr) 1789 // cond: 1790 // result: (LoweredGetClosurePtr) 1791 for { 1792 v.reset(OpMIPSLoweredGetClosurePtr) 1793 return true 1794 } 1795 } 1796 func rewriteValueMIPS_OpGreater16_0(v *Value) bool { 1797 b := v.Block 1798 _ = b 1799 typ := &b.Func.Config.Types 1800 _ = typ 1801 // match: (Greater16 x y) 1802 // cond: 1803 // result: (SGT (SignExt16to32 x) (SignExt16to32 y)) 1804 for { 1805 _ = v.Args[1] 1806 x := v.Args[0] 1807 y := v.Args[1] 1808 v.reset(OpMIPSSGT) 1809 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1810 v0.AddArg(x) 1811 v.AddArg(v0) 1812 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 1813 v1.AddArg(y) 1814 v.AddArg(v1) 1815 return true 1816 } 1817 } 1818 func rewriteValueMIPS_OpGreater16U_0(v *Value) bool { 1819 b := v.Block 1820 _ = b 1821 typ := &b.Func.Config.Types 1822 _ = typ 1823 // match: (Greater16U x y) 1824 // cond: 1825 // result: (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y)) 1826 for { 1827 _ = v.Args[1] 1828 x := v.Args[0] 1829 y := v.Args[1] 1830 v.reset(OpMIPSSGTU) 1831 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1832 v0.AddArg(x) 1833 v.AddArg(v0) 1834 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 1835 v1.AddArg(y) 1836 v.AddArg(v1) 1837 return true 1838 } 1839 } 1840 func rewriteValueMIPS_OpGreater32_0(v *Value) bool { 1841 // match: (Greater32 x y) 1842 // cond: 1843 // result: (SGT x y) 1844 for { 1845 _ = v.Args[1] 1846 x := v.Args[0] 1847 y := v.Args[1] 1848 v.reset(OpMIPSSGT) 1849 v.AddArg(x) 1850 v.AddArg(y) 1851 return true 1852 } 1853 } 1854 func rewriteValueMIPS_OpGreater32F_0(v *Value) bool { 1855 b := v.Block 1856 _ = b 1857 // match: (Greater32F x y) 1858 // cond: 1859 // result: (FPFlagTrue 
(CMPGTF x y)) 1860 for { 1861 _ = v.Args[1] 1862 x := v.Args[0] 1863 y := v.Args[1] 1864 v.reset(OpMIPSFPFlagTrue) 1865 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags) 1866 v0.AddArg(x) 1867 v0.AddArg(y) 1868 v.AddArg(v0) 1869 return true 1870 } 1871 } 1872 func rewriteValueMIPS_OpGreater32U_0(v *Value) bool { 1873 // match: (Greater32U x y) 1874 // cond: 1875 // result: (SGTU x y) 1876 for { 1877 _ = v.Args[1] 1878 x := v.Args[0] 1879 y := v.Args[1] 1880 v.reset(OpMIPSSGTU) 1881 v.AddArg(x) 1882 v.AddArg(y) 1883 return true 1884 } 1885 } 1886 func rewriteValueMIPS_OpGreater64F_0(v *Value) bool { 1887 b := v.Block 1888 _ = b 1889 // match: (Greater64F x y) 1890 // cond: 1891 // result: (FPFlagTrue (CMPGTD x y)) 1892 for { 1893 _ = v.Args[1] 1894 x := v.Args[0] 1895 y := v.Args[1] 1896 v.reset(OpMIPSFPFlagTrue) 1897 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags) 1898 v0.AddArg(x) 1899 v0.AddArg(y) 1900 v.AddArg(v0) 1901 return true 1902 } 1903 } 1904 func rewriteValueMIPS_OpGreater8_0(v *Value) bool { 1905 b := v.Block 1906 _ = b 1907 typ := &b.Func.Config.Types 1908 _ = typ 1909 // match: (Greater8 x y) 1910 // cond: 1911 // result: (SGT (SignExt8to32 x) (SignExt8to32 y)) 1912 for { 1913 _ = v.Args[1] 1914 x := v.Args[0] 1915 y := v.Args[1] 1916 v.reset(OpMIPSSGT) 1917 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1918 v0.AddArg(x) 1919 v.AddArg(v0) 1920 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 1921 v1.AddArg(y) 1922 v.AddArg(v1) 1923 return true 1924 } 1925 } 1926 func rewriteValueMIPS_OpGreater8U_0(v *Value) bool { 1927 b := v.Block 1928 _ = b 1929 typ := &b.Func.Config.Types 1930 _ = typ 1931 // match: (Greater8U x y) 1932 // cond: 1933 // result: (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y)) 1934 for { 1935 _ = v.Args[1] 1936 x := v.Args[0] 1937 y := v.Args[1] 1938 v.reset(OpMIPSSGTU) 1939 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 1940 v0.AddArg(x) 1941 v.AddArg(v0) 1942 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, 
typ.UInt32) 1943 v1.AddArg(y) 1944 v.AddArg(v1) 1945 return true 1946 } 1947 } 1948 func rewriteValueMIPS_OpHmul32_0(v *Value) bool { 1949 b := v.Block 1950 _ = b 1951 typ := &b.Func.Config.Types 1952 _ = typ 1953 // match: (Hmul32 x y) 1954 // cond: 1955 // result: (Select0 (MULT x y)) 1956 for { 1957 _ = v.Args[1] 1958 x := v.Args[0] 1959 y := v.Args[1] 1960 v.reset(OpSelect0) 1961 v0 := b.NewValue0(v.Pos, OpMIPSMULT, types.NewTuple(typ.Int32, typ.Int32)) 1962 v0.AddArg(x) 1963 v0.AddArg(y) 1964 v.AddArg(v0) 1965 return true 1966 } 1967 } 1968 func rewriteValueMIPS_OpHmul32u_0(v *Value) bool { 1969 b := v.Block 1970 _ = b 1971 typ := &b.Func.Config.Types 1972 _ = typ 1973 // match: (Hmul32u x y) 1974 // cond: 1975 // result: (Select0 (MULTU x y)) 1976 for { 1977 _ = v.Args[1] 1978 x := v.Args[0] 1979 y := v.Args[1] 1980 v.reset(OpSelect0) 1981 v0 := b.NewValue0(v.Pos, OpMIPSMULTU, types.NewTuple(typ.UInt32, typ.UInt32)) 1982 v0.AddArg(x) 1983 v0.AddArg(y) 1984 v.AddArg(v0) 1985 return true 1986 } 1987 } 1988 func rewriteValueMIPS_OpInterCall_0(v *Value) bool { 1989 // match: (InterCall [argwid] entry mem) 1990 // cond: 1991 // result: (CALLinter [argwid] entry mem) 1992 for { 1993 argwid := v.AuxInt 1994 _ = v.Args[1] 1995 entry := v.Args[0] 1996 mem := v.Args[1] 1997 v.reset(OpMIPSCALLinter) 1998 v.AuxInt = argwid 1999 v.AddArg(entry) 2000 v.AddArg(mem) 2001 return true 2002 } 2003 } 2004 func rewriteValueMIPS_OpIsInBounds_0(v *Value) bool { 2005 // match: (IsInBounds idx len) 2006 // cond: 2007 // result: (SGTU len idx) 2008 for { 2009 _ = v.Args[1] 2010 idx := v.Args[0] 2011 len := v.Args[1] 2012 v.reset(OpMIPSSGTU) 2013 v.AddArg(len) 2014 v.AddArg(idx) 2015 return true 2016 } 2017 } 2018 func rewriteValueMIPS_OpIsNonNil_0(v *Value) bool { 2019 b := v.Block 2020 _ = b 2021 typ := &b.Func.Config.Types 2022 _ = typ 2023 // match: (IsNonNil ptr) 2024 // cond: 2025 // result: (SGTU ptr (MOVWconst [0])) 2026 for { 2027 ptr := v.Args[0] 2028 v.reset(OpMIPSSGTU) 2029 
v.AddArg(ptr) 2030 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2031 v0.AuxInt = 0 2032 v.AddArg(v0) 2033 return true 2034 } 2035 } 2036 func rewriteValueMIPS_OpIsSliceInBounds_0(v *Value) bool { 2037 b := v.Block 2038 _ = b 2039 typ := &b.Func.Config.Types 2040 _ = typ 2041 // match: (IsSliceInBounds idx len) 2042 // cond: 2043 // result: (XORconst [1] (SGTU idx len)) 2044 for { 2045 _ = v.Args[1] 2046 idx := v.Args[0] 2047 len := v.Args[1] 2048 v.reset(OpMIPSXORconst) 2049 v.AuxInt = 1 2050 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 2051 v0.AddArg(idx) 2052 v0.AddArg(len) 2053 v.AddArg(v0) 2054 return true 2055 } 2056 } 2057 func rewriteValueMIPS_OpLeq16_0(v *Value) bool { 2058 b := v.Block 2059 _ = b 2060 typ := &b.Func.Config.Types 2061 _ = typ 2062 // match: (Leq16 x y) 2063 // cond: 2064 // result: (XORconst [1] (SGT (SignExt16to32 x) (SignExt16to32 y))) 2065 for { 2066 _ = v.Args[1] 2067 x := v.Args[0] 2068 y := v.Args[1] 2069 v.reset(OpMIPSXORconst) 2070 v.AuxInt = 1 2071 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 2072 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 2073 v1.AddArg(x) 2074 v0.AddArg(v1) 2075 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 2076 v2.AddArg(y) 2077 v0.AddArg(v2) 2078 v.AddArg(v0) 2079 return true 2080 } 2081 } 2082 func rewriteValueMIPS_OpLeq16U_0(v *Value) bool { 2083 b := v.Block 2084 _ = b 2085 typ := &b.Func.Config.Types 2086 _ = typ 2087 // match: (Leq16U x y) 2088 // cond: 2089 // result: (XORconst [1] (SGTU (ZeroExt16to32 x) (ZeroExt16to32 y))) 2090 for { 2091 _ = v.Args[1] 2092 x := v.Args[0] 2093 y := v.Args[1] 2094 v.reset(OpMIPSXORconst) 2095 v.AuxInt = 1 2096 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 2097 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2098 v1.AddArg(x) 2099 v0.AddArg(v1) 2100 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2101 v2.AddArg(y) 2102 v0.AddArg(v2) 2103 v.AddArg(v0) 2104 return true 2105 } 2106 } 2107 func rewriteValueMIPS_OpLeq32_0(v *Value) 
bool { 2108 b := v.Block 2109 _ = b 2110 typ := &b.Func.Config.Types 2111 _ = typ 2112 // match: (Leq32 x y) 2113 // cond: 2114 // result: (XORconst [1] (SGT x y)) 2115 for { 2116 _ = v.Args[1] 2117 x := v.Args[0] 2118 y := v.Args[1] 2119 v.reset(OpMIPSXORconst) 2120 v.AuxInt = 1 2121 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 2122 v0.AddArg(x) 2123 v0.AddArg(y) 2124 v.AddArg(v0) 2125 return true 2126 } 2127 } 2128 func rewriteValueMIPS_OpLeq32F_0(v *Value) bool { 2129 b := v.Block 2130 _ = b 2131 // match: (Leq32F x y) 2132 // cond: 2133 // result: (FPFlagTrue (CMPGEF y x)) 2134 for { 2135 _ = v.Args[1] 2136 x := v.Args[0] 2137 y := v.Args[1] 2138 v.reset(OpMIPSFPFlagTrue) 2139 v0 := b.NewValue0(v.Pos, OpMIPSCMPGEF, types.TypeFlags) 2140 v0.AddArg(y) 2141 v0.AddArg(x) 2142 v.AddArg(v0) 2143 return true 2144 } 2145 } 2146 func rewriteValueMIPS_OpLeq32U_0(v *Value) bool { 2147 b := v.Block 2148 _ = b 2149 typ := &b.Func.Config.Types 2150 _ = typ 2151 // match: (Leq32U x y) 2152 // cond: 2153 // result: (XORconst [1] (SGTU x y)) 2154 for { 2155 _ = v.Args[1] 2156 x := v.Args[0] 2157 y := v.Args[1] 2158 v.reset(OpMIPSXORconst) 2159 v.AuxInt = 1 2160 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 2161 v0.AddArg(x) 2162 v0.AddArg(y) 2163 v.AddArg(v0) 2164 return true 2165 } 2166 } 2167 func rewriteValueMIPS_OpLeq64F_0(v *Value) bool { 2168 b := v.Block 2169 _ = b 2170 // match: (Leq64F x y) 2171 // cond: 2172 // result: (FPFlagTrue (CMPGED y x)) 2173 for { 2174 _ = v.Args[1] 2175 x := v.Args[0] 2176 y := v.Args[1] 2177 v.reset(OpMIPSFPFlagTrue) 2178 v0 := b.NewValue0(v.Pos, OpMIPSCMPGED, types.TypeFlags) 2179 v0.AddArg(y) 2180 v0.AddArg(x) 2181 v.AddArg(v0) 2182 return true 2183 } 2184 } 2185 func rewriteValueMIPS_OpLeq8_0(v *Value) bool { 2186 b := v.Block 2187 _ = b 2188 typ := &b.Func.Config.Types 2189 _ = typ 2190 // match: (Leq8 x y) 2191 // cond: 2192 // result: (XORconst [1] (SGT (SignExt8to32 x) (SignExt8to32 y))) 2193 for { 2194 _ = v.Args[1] 2195 x := 
v.Args[0] 2196 y := v.Args[1] 2197 v.reset(OpMIPSXORconst) 2198 v.AuxInt = 1 2199 v0 := b.NewValue0(v.Pos, OpMIPSSGT, typ.Bool) 2200 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 2201 v1.AddArg(x) 2202 v0.AddArg(v1) 2203 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 2204 v2.AddArg(y) 2205 v0.AddArg(v2) 2206 v.AddArg(v0) 2207 return true 2208 } 2209 } 2210 func rewriteValueMIPS_OpLeq8U_0(v *Value) bool { 2211 b := v.Block 2212 _ = b 2213 typ := &b.Func.Config.Types 2214 _ = typ 2215 // match: (Leq8U x y) 2216 // cond: 2217 // result: (XORconst [1] (SGTU (ZeroExt8to32 x) (ZeroExt8to32 y))) 2218 for { 2219 _ = v.Args[1] 2220 x := v.Args[0] 2221 y := v.Args[1] 2222 v.reset(OpMIPSXORconst) 2223 v.AuxInt = 1 2224 v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool) 2225 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2226 v1.AddArg(x) 2227 v0.AddArg(v1) 2228 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2229 v2.AddArg(y) 2230 v0.AddArg(v2) 2231 v.AddArg(v0) 2232 return true 2233 } 2234 } 2235 func rewriteValueMIPS_OpLess16_0(v *Value) bool { 2236 b := v.Block 2237 _ = b 2238 typ := &b.Func.Config.Types 2239 _ = typ 2240 // match: (Less16 x y) 2241 // cond: 2242 // result: (SGT (SignExt16to32 y) (SignExt16to32 x)) 2243 for { 2244 _ = v.Args[1] 2245 x := v.Args[0] 2246 y := v.Args[1] 2247 v.reset(OpMIPSSGT) 2248 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 2249 v0.AddArg(y) 2250 v.AddArg(v0) 2251 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 2252 v1.AddArg(x) 2253 v.AddArg(v1) 2254 return true 2255 } 2256 } 2257 func rewriteValueMIPS_OpLess16U_0(v *Value) bool { 2258 b := v.Block 2259 _ = b 2260 typ := &b.Func.Config.Types 2261 _ = typ 2262 // match: (Less16U x y) 2263 // cond: 2264 // result: (SGTU (ZeroExt16to32 y) (ZeroExt16to32 x)) 2265 for { 2266 _ = v.Args[1] 2267 x := v.Args[0] 2268 y := v.Args[1] 2269 v.reset(OpMIPSSGTU) 2270 v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2271 v0.AddArg(y) 2272 v.AddArg(v0) 2273 v1 := 
b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2274 v1.AddArg(x) 2275 v.AddArg(v1) 2276 return true 2277 } 2278 } 2279 func rewriteValueMIPS_OpLess32_0(v *Value) bool { 2280 // match: (Less32 x y) 2281 // cond: 2282 // result: (SGT y x) 2283 for { 2284 _ = v.Args[1] 2285 x := v.Args[0] 2286 y := v.Args[1] 2287 v.reset(OpMIPSSGT) 2288 v.AddArg(y) 2289 v.AddArg(x) 2290 return true 2291 } 2292 } 2293 func rewriteValueMIPS_OpLess32F_0(v *Value) bool { 2294 b := v.Block 2295 _ = b 2296 // match: (Less32F x y) 2297 // cond: 2298 // result: (FPFlagTrue (CMPGTF y x)) 2299 for { 2300 _ = v.Args[1] 2301 x := v.Args[0] 2302 y := v.Args[1] 2303 v.reset(OpMIPSFPFlagTrue) 2304 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTF, types.TypeFlags) 2305 v0.AddArg(y) 2306 v0.AddArg(x) 2307 v.AddArg(v0) 2308 return true 2309 } 2310 } 2311 func rewriteValueMIPS_OpLess32U_0(v *Value) bool { 2312 // match: (Less32U x y) 2313 // cond: 2314 // result: (SGTU y x) 2315 for { 2316 _ = v.Args[1] 2317 x := v.Args[0] 2318 y := v.Args[1] 2319 v.reset(OpMIPSSGTU) 2320 v.AddArg(y) 2321 v.AddArg(x) 2322 return true 2323 } 2324 } 2325 func rewriteValueMIPS_OpLess64F_0(v *Value) bool { 2326 b := v.Block 2327 _ = b 2328 // match: (Less64F x y) 2329 // cond: 2330 // result: (FPFlagTrue (CMPGTD y x)) 2331 for { 2332 _ = v.Args[1] 2333 x := v.Args[0] 2334 y := v.Args[1] 2335 v.reset(OpMIPSFPFlagTrue) 2336 v0 := b.NewValue0(v.Pos, OpMIPSCMPGTD, types.TypeFlags) 2337 v0.AddArg(y) 2338 v0.AddArg(x) 2339 v.AddArg(v0) 2340 return true 2341 } 2342 } 2343 func rewriteValueMIPS_OpLess8_0(v *Value) bool { 2344 b := v.Block 2345 _ = b 2346 typ := &b.Func.Config.Types 2347 _ = typ 2348 // match: (Less8 x y) 2349 // cond: 2350 // result: (SGT (SignExt8to32 y) (SignExt8to32 x)) 2351 for { 2352 _ = v.Args[1] 2353 x := v.Args[0] 2354 y := v.Args[1] 2355 v.reset(OpMIPSSGT) 2356 v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 2357 v0.AddArg(y) 2358 v.AddArg(v0) 2359 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 2360 
v1.AddArg(x) 2361 v.AddArg(v1) 2362 return true 2363 } 2364 } 2365 func rewriteValueMIPS_OpLess8U_0(v *Value) bool { 2366 b := v.Block 2367 _ = b 2368 typ := &b.Func.Config.Types 2369 _ = typ 2370 // match: (Less8U x y) 2371 // cond: 2372 // result: (SGTU (ZeroExt8to32 y) (ZeroExt8to32 x)) 2373 for { 2374 _ = v.Args[1] 2375 x := v.Args[0] 2376 y := v.Args[1] 2377 v.reset(OpMIPSSGTU) 2378 v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2379 v0.AddArg(y) 2380 v.AddArg(v0) 2381 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2382 v1.AddArg(x) 2383 v.AddArg(v1) 2384 return true 2385 } 2386 } 2387 func rewriteValueMIPS_OpLoad_0(v *Value) bool { 2388 // match: (Load <t> ptr mem) 2389 // cond: t.IsBoolean() 2390 // result: (MOVBUload ptr mem) 2391 for { 2392 t := v.Type 2393 _ = v.Args[1] 2394 ptr := v.Args[0] 2395 mem := v.Args[1] 2396 if !(t.IsBoolean()) { 2397 break 2398 } 2399 v.reset(OpMIPSMOVBUload) 2400 v.AddArg(ptr) 2401 v.AddArg(mem) 2402 return true 2403 } 2404 // match: (Load <t> ptr mem) 2405 // cond: (is8BitInt(t) && isSigned(t)) 2406 // result: (MOVBload ptr mem) 2407 for { 2408 t := v.Type 2409 _ = v.Args[1] 2410 ptr := v.Args[0] 2411 mem := v.Args[1] 2412 if !(is8BitInt(t) && isSigned(t)) { 2413 break 2414 } 2415 v.reset(OpMIPSMOVBload) 2416 v.AddArg(ptr) 2417 v.AddArg(mem) 2418 return true 2419 } 2420 // match: (Load <t> ptr mem) 2421 // cond: (is8BitInt(t) && !isSigned(t)) 2422 // result: (MOVBUload ptr mem) 2423 for { 2424 t := v.Type 2425 _ = v.Args[1] 2426 ptr := v.Args[0] 2427 mem := v.Args[1] 2428 if !(is8BitInt(t) && !isSigned(t)) { 2429 break 2430 } 2431 v.reset(OpMIPSMOVBUload) 2432 v.AddArg(ptr) 2433 v.AddArg(mem) 2434 return true 2435 } 2436 // match: (Load <t> ptr mem) 2437 // cond: (is16BitInt(t) && isSigned(t)) 2438 // result: (MOVHload ptr mem) 2439 for { 2440 t := v.Type 2441 _ = v.Args[1] 2442 ptr := v.Args[0] 2443 mem := v.Args[1] 2444 if !(is16BitInt(t) && isSigned(t)) { 2445 break 2446 } 2447 v.reset(OpMIPSMOVHload) 2448 
v.AddArg(ptr) 2449 v.AddArg(mem) 2450 return true 2451 } 2452 // match: (Load <t> ptr mem) 2453 // cond: (is16BitInt(t) && !isSigned(t)) 2454 // result: (MOVHUload ptr mem) 2455 for { 2456 t := v.Type 2457 _ = v.Args[1] 2458 ptr := v.Args[0] 2459 mem := v.Args[1] 2460 if !(is16BitInt(t) && !isSigned(t)) { 2461 break 2462 } 2463 v.reset(OpMIPSMOVHUload) 2464 v.AddArg(ptr) 2465 v.AddArg(mem) 2466 return true 2467 } 2468 // match: (Load <t> ptr mem) 2469 // cond: (is32BitInt(t) || isPtr(t)) 2470 // result: (MOVWload ptr mem) 2471 for { 2472 t := v.Type 2473 _ = v.Args[1] 2474 ptr := v.Args[0] 2475 mem := v.Args[1] 2476 if !(is32BitInt(t) || isPtr(t)) { 2477 break 2478 } 2479 v.reset(OpMIPSMOVWload) 2480 v.AddArg(ptr) 2481 v.AddArg(mem) 2482 return true 2483 } 2484 // match: (Load <t> ptr mem) 2485 // cond: is32BitFloat(t) 2486 // result: (MOVFload ptr mem) 2487 for { 2488 t := v.Type 2489 _ = v.Args[1] 2490 ptr := v.Args[0] 2491 mem := v.Args[1] 2492 if !(is32BitFloat(t)) { 2493 break 2494 } 2495 v.reset(OpMIPSMOVFload) 2496 v.AddArg(ptr) 2497 v.AddArg(mem) 2498 return true 2499 } 2500 // match: (Load <t> ptr mem) 2501 // cond: is64BitFloat(t) 2502 // result: (MOVDload ptr mem) 2503 for { 2504 t := v.Type 2505 _ = v.Args[1] 2506 ptr := v.Args[0] 2507 mem := v.Args[1] 2508 if !(is64BitFloat(t)) { 2509 break 2510 } 2511 v.reset(OpMIPSMOVDload) 2512 v.AddArg(ptr) 2513 v.AddArg(mem) 2514 return true 2515 } 2516 return false 2517 } 2518 func rewriteValueMIPS_OpLocalAddr_0(v *Value) bool { 2519 // match: (LocalAddr {sym} base _) 2520 // cond: 2521 // result: (MOVWaddr {sym} base) 2522 for { 2523 sym := v.Aux 2524 _ = v.Args[1] 2525 base := v.Args[0] 2526 v.reset(OpMIPSMOVWaddr) 2527 v.Aux = sym 2528 v.AddArg(base) 2529 return true 2530 } 2531 } 2532 func rewriteValueMIPS_OpLsh16x16_0(v *Value) bool { 2533 b := v.Block 2534 _ = b 2535 typ := &b.Func.Config.Types 2536 _ = typ 2537 // match: (Lsh16x16 <t> x y) 2538 // cond: 2539 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) 
(MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 2540 for { 2541 t := v.Type 2542 _ = v.Args[1] 2543 x := v.Args[0] 2544 y := v.Args[1] 2545 v.reset(OpMIPSCMOVZ) 2546 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2547 v0.AddArg(x) 2548 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2549 v1.AddArg(y) 2550 v0.AddArg(v1) 2551 v.AddArg(v0) 2552 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2553 v2.AuxInt = 0 2554 v.AddArg(v2) 2555 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2556 v3.AuxInt = 32 2557 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2558 v4.AddArg(y) 2559 v3.AddArg(v4) 2560 v.AddArg(v3) 2561 return true 2562 } 2563 } 2564 func rewriteValueMIPS_OpLsh16x32_0(v *Value) bool { 2565 b := v.Block 2566 _ = b 2567 typ := &b.Func.Config.Types 2568 _ = typ 2569 // match: (Lsh16x32 <t> x y) 2570 // cond: 2571 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 2572 for { 2573 t := v.Type 2574 _ = v.Args[1] 2575 x := v.Args[0] 2576 y := v.Args[1] 2577 v.reset(OpMIPSCMOVZ) 2578 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2579 v0.AddArg(x) 2580 v0.AddArg(y) 2581 v.AddArg(v0) 2582 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2583 v1.AuxInt = 0 2584 v.AddArg(v1) 2585 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2586 v2.AuxInt = 32 2587 v2.AddArg(y) 2588 v.AddArg(v2) 2589 return true 2590 } 2591 } 2592 func rewriteValueMIPS_OpLsh16x64_0(v *Value) bool { 2593 // match: (Lsh16x64 x (Const64 [c])) 2594 // cond: uint32(c) < 16 2595 // result: (SLLconst x [c]) 2596 for { 2597 _ = v.Args[1] 2598 x := v.Args[0] 2599 v_1 := v.Args[1] 2600 if v_1.Op != OpConst64 { 2601 break 2602 } 2603 c := v_1.AuxInt 2604 if !(uint32(c) < 16) { 2605 break 2606 } 2607 v.reset(OpMIPSSLLconst) 2608 v.AuxInt = c 2609 v.AddArg(x) 2610 return true 2611 } 2612 // match: (Lsh16x64 _ (Const64 [c])) 2613 // cond: uint32(c) >= 16 2614 // result: (MOVWconst [0]) 2615 for { 2616 _ = v.Args[1] 2617 v_1 := v.Args[1] 2618 if v_1.Op != OpConst64 { 2619 break 
2620 } 2621 c := v_1.AuxInt 2622 if !(uint32(c) >= 16) { 2623 break 2624 } 2625 v.reset(OpMIPSMOVWconst) 2626 v.AuxInt = 0 2627 return true 2628 } 2629 return false 2630 } 2631 func rewriteValueMIPS_OpLsh16x8_0(v *Value) bool { 2632 b := v.Block 2633 _ = b 2634 typ := &b.Func.Config.Types 2635 _ = typ 2636 // match: (Lsh16x8 <t> x y) 2637 // cond: 2638 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 2639 for { 2640 t := v.Type 2641 _ = v.Args[1] 2642 x := v.Args[0] 2643 y := v.Args[1] 2644 v.reset(OpMIPSCMOVZ) 2645 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2646 v0.AddArg(x) 2647 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2648 v1.AddArg(y) 2649 v0.AddArg(v1) 2650 v.AddArg(v0) 2651 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2652 v2.AuxInt = 0 2653 v.AddArg(v2) 2654 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2655 v3.AuxInt = 32 2656 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2657 v4.AddArg(y) 2658 v3.AddArg(v4) 2659 v.AddArg(v3) 2660 return true 2661 } 2662 } 2663 func rewriteValueMIPS_OpLsh32x16_0(v *Value) bool { 2664 b := v.Block 2665 _ = b 2666 typ := &b.Func.Config.Types 2667 _ = typ 2668 // match: (Lsh32x16 <t> x y) 2669 // cond: 2670 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 2671 for { 2672 t := v.Type 2673 _ = v.Args[1] 2674 x := v.Args[0] 2675 y := v.Args[1] 2676 v.reset(OpMIPSCMOVZ) 2677 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2678 v0.AddArg(x) 2679 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2680 v1.AddArg(y) 2681 v0.AddArg(v1) 2682 v.AddArg(v0) 2683 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2684 v2.AuxInt = 0 2685 v.AddArg(v2) 2686 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2687 v3.AuxInt = 32 2688 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2689 v4.AddArg(y) 2690 v3.AddArg(v4) 2691 v.AddArg(v3) 2692 return true 2693 } 2694 } 2695 func rewriteValueMIPS_OpLsh32x32_0(v 
*Value) bool { 2696 b := v.Block 2697 _ = b 2698 typ := &b.Func.Config.Types 2699 _ = typ 2700 // match: (Lsh32x32 <t> x y) 2701 // cond: 2702 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 2703 for { 2704 t := v.Type 2705 _ = v.Args[1] 2706 x := v.Args[0] 2707 y := v.Args[1] 2708 v.reset(OpMIPSCMOVZ) 2709 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2710 v0.AddArg(x) 2711 v0.AddArg(y) 2712 v.AddArg(v0) 2713 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2714 v1.AuxInt = 0 2715 v.AddArg(v1) 2716 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2717 v2.AuxInt = 32 2718 v2.AddArg(y) 2719 v.AddArg(v2) 2720 return true 2721 } 2722 } 2723 func rewriteValueMIPS_OpLsh32x64_0(v *Value) bool { 2724 // match: (Lsh32x64 x (Const64 [c])) 2725 // cond: uint32(c) < 32 2726 // result: (SLLconst x [c]) 2727 for { 2728 _ = v.Args[1] 2729 x := v.Args[0] 2730 v_1 := v.Args[1] 2731 if v_1.Op != OpConst64 { 2732 break 2733 } 2734 c := v_1.AuxInt 2735 if !(uint32(c) < 32) { 2736 break 2737 } 2738 v.reset(OpMIPSSLLconst) 2739 v.AuxInt = c 2740 v.AddArg(x) 2741 return true 2742 } 2743 // match: (Lsh32x64 _ (Const64 [c])) 2744 // cond: uint32(c) >= 32 2745 // result: (MOVWconst [0]) 2746 for { 2747 _ = v.Args[1] 2748 v_1 := v.Args[1] 2749 if v_1.Op != OpConst64 { 2750 break 2751 } 2752 c := v_1.AuxInt 2753 if !(uint32(c) >= 32) { 2754 break 2755 } 2756 v.reset(OpMIPSMOVWconst) 2757 v.AuxInt = 0 2758 return true 2759 } 2760 return false 2761 } 2762 func rewriteValueMIPS_OpLsh32x8_0(v *Value) bool { 2763 b := v.Block 2764 _ = b 2765 typ := &b.Func.Config.Types 2766 _ = typ 2767 // match: (Lsh32x8 <t> x y) 2768 // cond: 2769 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 2770 for { 2771 t := v.Type 2772 _ = v.Args[1] 2773 x := v.Args[0] 2774 y := v.Args[1] 2775 v.reset(OpMIPSCMOVZ) 2776 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2777 v0.AddArg(x) 2778 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2779 v1.AddArg(y) 
2780 v0.AddArg(v1) 2781 v.AddArg(v0) 2782 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2783 v2.AuxInt = 0 2784 v.AddArg(v2) 2785 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2786 v3.AuxInt = 32 2787 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2788 v4.AddArg(y) 2789 v3.AddArg(v4) 2790 v.AddArg(v3) 2791 return true 2792 } 2793 } 2794 func rewriteValueMIPS_OpLsh8x16_0(v *Value) bool { 2795 b := v.Block 2796 _ = b 2797 typ := &b.Func.Config.Types 2798 _ = typ 2799 // match: (Lsh8x16 <t> x y) 2800 // cond: 2801 // result: (CMOVZ (SLL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 2802 for { 2803 t := v.Type 2804 _ = v.Args[1] 2805 x := v.Args[0] 2806 y := v.Args[1] 2807 v.reset(OpMIPSCMOVZ) 2808 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2809 v0.AddArg(x) 2810 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2811 v1.AddArg(y) 2812 v0.AddArg(v1) 2813 v.AddArg(v0) 2814 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2815 v2.AuxInt = 0 2816 v.AddArg(v2) 2817 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2818 v3.AuxInt = 32 2819 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 2820 v4.AddArg(y) 2821 v3.AddArg(v4) 2822 v.AddArg(v3) 2823 return true 2824 } 2825 } 2826 func rewriteValueMIPS_OpLsh8x32_0(v *Value) bool { 2827 b := v.Block 2828 _ = b 2829 typ := &b.Func.Config.Types 2830 _ = typ 2831 // match: (Lsh8x32 <t> x y) 2832 // cond: 2833 // result: (CMOVZ (SLL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 2834 for { 2835 t := v.Type 2836 _ = v.Args[1] 2837 x := v.Args[0] 2838 y := v.Args[1] 2839 v.reset(OpMIPSCMOVZ) 2840 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2841 v0.AddArg(x) 2842 v0.AddArg(y) 2843 v.AddArg(v0) 2844 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2845 v1.AuxInt = 0 2846 v.AddArg(v1) 2847 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2848 v2.AuxInt = 32 2849 v2.AddArg(y) 2850 v.AddArg(v2) 2851 return true 2852 } 2853 } 2854 func rewriteValueMIPS_OpLsh8x64_0(v *Value) 
bool { 2855 // match: (Lsh8x64 x (Const64 [c])) 2856 // cond: uint32(c) < 8 2857 // result: (SLLconst x [c]) 2858 for { 2859 _ = v.Args[1] 2860 x := v.Args[0] 2861 v_1 := v.Args[1] 2862 if v_1.Op != OpConst64 { 2863 break 2864 } 2865 c := v_1.AuxInt 2866 if !(uint32(c) < 8) { 2867 break 2868 } 2869 v.reset(OpMIPSSLLconst) 2870 v.AuxInt = c 2871 v.AddArg(x) 2872 return true 2873 } 2874 // match: (Lsh8x64 _ (Const64 [c])) 2875 // cond: uint32(c) >= 8 2876 // result: (MOVWconst [0]) 2877 for { 2878 _ = v.Args[1] 2879 v_1 := v.Args[1] 2880 if v_1.Op != OpConst64 { 2881 break 2882 } 2883 c := v_1.AuxInt 2884 if !(uint32(c) >= 8) { 2885 break 2886 } 2887 v.reset(OpMIPSMOVWconst) 2888 v.AuxInt = 0 2889 return true 2890 } 2891 return false 2892 } 2893 func rewriteValueMIPS_OpLsh8x8_0(v *Value) bool { 2894 b := v.Block 2895 _ = b 2896 typ := &b.Func.Config.Types 2897 _ = typ 2898 // match: (Lsh8x8 <t> x y) 2899 // cond: 2900 // result: (CMOVZ (SLL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 2901 for { 2902 t := v.Type 2903 _ = v.Args[1] 2904 x := v.Args[0] 2905 y := v.Args[1] 2906 v.reset(OpMIPSCMOVZ) 2907 v0 := b.NewValue0(v.Pos, OpMIPSSLL, t) 2908 v0.AddArg(x) 2909 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2910 v1.AddArg(y) 2911 v0.AddArg(v1) 2912 v.AddArg(v0) 2913 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 2914 v2.AuxInt = 0 2915 v.AddArg(v2) 2916 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 2917 v3.AuxInt = 32 2918 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 2919 v4.AddArg(y) 2920 v3.AddArg(v4) 2921 v.AddArg(v3) 2922 return true 2923 } 2924 } 2925 func rewriteValueMIPS_OpMIPSADD_0(v *Value) bool { 2926 // match: (ADD x (MOVWconst [c])) 2927 // cond: 2928 // result: (ADDconst [c] x) 2929 for { 2930 _ = v.Args[1] 2931 x := v.Args[0] 2932 v_1 := v.Args[1] 2933 if v_1.Op != OpMIPSMOVWconst { 2934 break 2935 } 2936 c := v_1.AuxInt 2937 v.reset(OpMIPSADDconst) 2938 v.AuxInt = c 2939 v.AddArg(x) 2940 return 
true 2941 } 2942 // match: (ADD (MOVWconst [c]) x) 2943 // cond: 2944 // result: (ADDconst [c] x) 2945 for { 2946 _ = v.Args[1] 2947 v_0 := v.Args[0] 2948 if v_0.Op != OpMIPSMOVWconst { 2949 break 2950 } 2951 c := v_0.AuxInt 2952 x := v.Args[1] 2953 v.reset(OpMIPSADDconst) 2954 v.AuxInt = c 2955 v.AddArg(x) 2956 return true 2957 } 2958 // match: (ADD x (NEG y)) 2959 // cond: 2960 // result: (SUB x y) 2961 for { 2962 _ = v.Args[1] 2963 x := v.Args[0] 2964 v_1 := v.Args[1] 2965 if v_1.Op != OpMIPSNEG { 2966 break 2967 } 2968 y := v_1.Args[0] 2969 v.reset(OpMIPSSUB) 2970 v.AddArg(x) 2971 v.AddArg(y) 2972 return true 2973 } 2974 // match: (ADD (NEG y) x) 2975 // cond: 2976 // result: (SUB x y) 2977 for { 2978 _ = v.Args[1] 2979 v_0 := v.Args[0] 2980 if v_0.Op != OpMIPSNEG { 2981 break 2982 } 2983 y := v_0.Args[0] 2984 x := v.Args[1] 2985 v.reset(OpMIPSSUB) 2986 v.AddArg(x) 2987 v.AddArg(y) 2988 return true 2989 } 2990 return false 2991 } 2992 func rewriteValueMIPS_OpMIPSADDconst_0(v *Value) bool { 2993 // match: (ADDconst [off1] (MOVWaddr [off2] {sym} ptr)) 2994 // cond: 2995 // result: (MOVWaddr [off1+off2] {sym} ptr) 2996 for { 2997 off1 := v.AuxInt 2998 v_0 := v.Args[0] 2999 if v_0.Op != OpMIPSMOVWaddr { 3000 break 3001 } 3002 off2 := v_0.AuxInt 3003 sym := v_0.Aux 3004 ptr := v_0.Args[0] 3005 v.reset(OpMIPSMOVWaddr) 3006 v.AuxInt = off1 + off2 3007 v.Aux = sym 3008 v.AddArg(ptr) 3009 return true 3010 } 3011 // match: (ADDconst [0] x) 3012 // cond: 3013 // result: x 3014 for { 3015 if v.AuxInt != 0 { 3016 break 3017 } 3018 x := v.Args[0] 3019 v.reset(OpCopy) 3020 v.Type = x.Type 3021 v.AddArg(x) 3022 return true 3023 } 3024 // match: (ADDconst [c] (MOVWconst [d])) 3025 // cond: 3026 // result: (MOVWconst [int64(int32(c+d))]) 3027 for { 3028 c := v.AuxInt 3029 v_0 := v.Args[0] 3030 if v_0.Op != OpMIPSMOVWconst { 3031 break 3032 } 3033 d := v_0.AuxInt 3034 v.reset(OpMIPSMOVWconst) 3035 v.AuxInt = int64(int32(c + d)) 3036 return true 3037 } 3038 // match: (ADDconst [c] 
(ADDconst [d] x)) 3039 // cond: 3040 // result: (ADDconst [int64(int32(c+d))] x) 3041 for { 3042 c := v.AuxInt 3043 v_0 := v.Args[0] 3044 if v_0.Op != OpMIPSADDconst { 3045 break 3046 } 3047 d := v_0.AuxInt 3048 x := v_0.Args[0] 3049 v.reset(OpMIPSADDconst) 3050 v.AuxInt = int64(int32(c + d)) 3051 v.AddArg(x) 3052 return true 3053 } 3054 // match: (ADDconst [c] (SUBconst [d] x)) 3055 // cond: 3056 // result: (ADDconst [int64(int32(c-d))] x) 3057 for { 3058 c := v.AuxInt 3059 v_0 := v.Args[0] 3060 if v_0.Op != OpMIPSSUBconst { 3061 break 3062 } 3063 d := v_0.AuxInt 3064 x := v_0.Args[0] 3065 v.reset(OpMIPSADDconst) 3066 v.AuxInt = int64(int32(c - d)) 3067 v.AddArg(x) 3068 return true 3069 } 3070 return false 3071 } 3072 func rewriteValueMIPS_OpMIPSAND_0(v *Value) bool { 3073 b := v.Block 3074 _ = b 3075 // match: (AND x (MOVWconst [c])) 3076 // cond: 3077 // result: (ANDconst [c] x) 3078 for { 3079 _ = v.Args[1] 3080 x := v.Args[0] 3081 v_1 := v.Args[1] 3082 if v_1.Op != OpMIPSMOVWconst { 3083 break 3084 } 3085 c := v_1.AuxInt 3086 v.reset(OpMIPSANDconst) 3087 v.AuxInt = c 3088 v.AddArg(x) 3089 return true 3090 } 3091 // match: (AND (MOVWconst [c]) x) 3092 // cond: 3093 // result: (ANDconst [c] x) 3094 for { 3095 _ = v.Args[1] 3096 v_0 := v.Args[0] 3097 if v_0.Op != OpMIPSMOVWconst { 3098 break 3099 } 3100 c := v_0.AuxInt 3101 x := v.Args[1] 3102 v.reset(OpMIPSANDconst) 3103 v.AuxInt = c 3104 v.AddArg(x) 3105 return true 3106 } 3107 // match: (AND x x) 3108 // cond: 3109 // result: x 3110 for { 3111 _ = v.Args[1] 3112 x := v.Args[0] 3113 if x != v.Args[1] { 3114 break 3115 } 3116 v.reset(OpCopy) 3117 v.Type = x.Type 3118 v.AddArg(x) 3119 return true 3120 } 3121 // match: (AND (SGTUconst [1] x) (SGTUconst [1] y)) 3122 // cond: 3123 // result: (SGTUconst [1] (OR <x.Type> x y)) 3124 for { 3125 _ = v.Args[1] 3126 v_0 := v.Args[0] 3127 if v_0.Op != OpMIPSSGTUconst { 3128 break 3129 } 3130 if v_0.AuxInt != 1 { 3131 break 3132 } 3133 x := v_0.Args[0] 3134 v_1 := v.Args[1] 
3135 if v_1.Op != OpMIPSSGTUconst { 3136 break 3137 } 3138 if v_1.AuxInt != 1 { 3139 break 3140 } 3141 y := v_1.Args[0] 3142 v.reset(OpMIPSSGTUconst) 3143 v.AuxInt = 1 3144 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 3145 v0.AddArg(x) 3146 v0.AddArg(y) 3147 v.AddArg(v0) 3148 return true 3149 } 3150 // match: (AND (SGTUconst [1] y) (SGTUconst [1] x)) 3151 // cond: 3152 // result: (SGTUconst [1] (OR <x.Type> x y)) 3153 for { 3154 _ = v.Args[1] 3155 v_0 := v.Args[0] 3156 if v_0.Op != OpMIPSSGTUconst { 3157 break 3158 } 3159 if v_0.AuxInt != 1 { 3160 break 3161 } 3162 y := v_0.Args[0] 3163 v_1 := v.Args[1] 3164 if v_1.Op != OpMIPSSGTUconst { 3165 break 3166 } 3167 if v_1.AuxInt != 1 { 3168 break 3169 } 3170 x := v_1.Args[0] 3171 v.reset(OpMIPSSGTUconst) 3172 v.AuxInt = 1 3173 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 3174 v0.AddArg(x) 3175 v0.AddArg(y) 3176 v.AddArg(v0) 3177 return true 3178 } 3179 return false 3180 } 3181 func rewriteValueMIPS_OpMIPSANDconst_0(v *Value) bool { 3182 // match: (ANDconst [0] _) 3183 // cond: 3184 // result: (MOVWconst [0]) 3185 for { 3186 if v.AuxInt != 0 { 3187 break 3188 } 3189 v.reset(OpMIPSMOVWconst) 3190 v.AuxInt = 0 3191 return true 3192 } 3193 // match: (ANDconst [-1] x) 3194 // cond: 3195 // result: x 3196 for { 3197 if v.AuxInt != -1 { 3198 break 3199 } 3200 x := v.Args[0] 3201 v.reset(OpCopy) 3202 v.Type = x.Type 3203 v.AddArg(x) 3204 return true 3205 } 3206 // match: (ANDconst [c] (MOVWconst [d])) 3207 // cond: 3208 // result: (MOVWconst [c&d]) 3209 for { 3210 c := v.AuxInt 3211 v_0 := v.Args[0] 3212 if v_0.Op != OpMIPSMOVWconst { 3213 break 3214 } 3215 d := v_0.AuxInt 3216 v.reset(OpMIPSMOVWconst) 3217 v.AuxInt = c & d 3218 return true 3219 } 3220 // match: (ANDconst [c] (ANDconst [d] x)) 3221 // cond: 3222 // result: (ANDconst [c&d] x) 3223 for { 3224 c := v.AuxInt 3225 v_0 := v.Args[0] 3226 if v_0.Op != OpMIPSANDconst { 3227 break 3228 } 3229 d := v_0.AuxInt 3230 x := v_0.Args[0] 3231 v.reset(OpMIPSANDconst) 3232 v.AuxInt = 
c & d 3233 v.AddArg(x) 3234 return true 3235 } 3236 return false 3237 } 3238 func rewriteValueMIPS_OpMIPSCMOVZ_0(v *Value) bool { 3239 b := v.Block 3240 _ = b 3241 // match: (CMOVZ _ b (MOVWconst [0])) 3242 // cond: 3243 // result: b 3244 for { 3245 _ = v.Args[2] 3246 b := v.Args[1] 3247 v_2 := v.Args[2] 3248 if v_2.Op != OpMIPSMOVWconst { 3249 break 3250 } 3251 if v_2.AuxInt != 0 { 3252 break 3253 } 3254 v.reset(OpCopy) 3255 v.Type = b.Type 3256 v.AddArg(b) 3257 return true 3258 } 3259 // match: (CMOVZ a _ (MOVWconst [c])) 3260 // cond: c!=0 3261 // result: a 3262 for { 3263 _ = v.Args[2] 3264 a := v.Args[0] 3265 v_2 := v.Args[2] 3266 if v_2.Op != OpMIPSMOVWconst { 3267 break 3268 } 3269 c := v_2.AuxInt 3270 if !(c != 0) { 3271 break 3272 } 3273 v.reset(OpCopy) 3274 v.Type = a.Type 3275 v.AddArg(a) 3276 return true 3277 } 3278 // match: (CMOVZ a (MOVWconst [0]) c) 3279 // cond: 3280 // result: (CMOVZzero a c) 3281 for { 3282 _ = v.Args[2] 3283 a := v.Args[0] 3284 v_1 := v.Args[1] 3285 if v_1.Op != OpMIPSMOVWconst { 3286 break 3287 } 3288 if v_1.AuxInt != 0 { 3289 break 3290 } 3291 c := v.Args[2] 3292 v.reset(OpMIPSCMOVZzero) 3293 v.AddArg(a) 3294 v.AddArg(c) 3295 return true 3296 } 3297 return false 3298 } 3299 func rewriteValueMIPS_OpMIPSCMOVZzero_0(v *Value) bool { 3300 // match: (CMOVZzero _ (MOVWconst [0])) 3301 // cond: 3302 // result: (MOVWconst [0]) 3303 for { 3304 _ = v.Args[1] 3305 v_1 := v.Args[1] 3306 if v_1.Op != OpMIPSMOVWconst { 3307 break 3308 } 3309 if v_1.AuxInt != 0 { 3310 break 3311 } 3312 v.reset(OpMIPSMOVWconst) 3313 v.AuxInt = 0 3314 return true 3315 } 3316 // match: (CMOVZzero a (MOVWconst [c])) 3317 // cond: c!=0 3318 // result: a 3319 for { 3320 _ = v.Args[1] 3321 a := v.Args[0] 3322 v_1 := v.Args[1] 3323 if v_1.Op != OpMIPSMOVWconst { 3324 break 3325 } 3326 c := v_1.AuxInt 3327 if !(c != 0) { 3328 break 3329 } 3330 v.reset(OpCopy) 3331 v.Type = a.Type 3332 v.AddArg(a) 3333 return true 3334 } 3335 return false 3336 } 3337 func 
rewriteValueMIPS_OpMIPSLoweredAtomicAdd_0(v *Value) bool { 3338 // match: (LoweredAtomicAdd ptr (MOVWconst [c]) mem) 3339 // cond: is16Bit(c) 3340 // result: (LoweredAtomicAddconst [c] ptr mem) 3341 for { 3342 _ = v.Args[2] 3343 ptr := v.Args[0] 3344 v_1 := v.Args[1] 3345 if v_1.Op != OpMIPSMOVWconst { 3346 break 3347 } 3348 c := v_1.AuxInt 3349 mem := v.Args[2] 3350 if !(is16Bit(c)) { 3351 break 3352 } 3353 v.reset(OpMIPSLoweredAtomicAddconst) 3354 v.AuxInt = c 3355 v.AddArg(ptr) 3356 v.AddArg(mem) 3357 return true 3358 } 3359 return false 3360 } 3361 func rewriteValueMIPS_OpMIPSLoweredAtomicStore_0(v *Value) bool { 3362 // match: (LoweredAtomicStore ptr (MOVWconst [0]) mem) 3363 // cond: 3364 // result: (LoweredAtomicStorezero ptr mem) 3365 for { 3366 _ = v.Args[2] 3367 ptr := v.Args[0] 3368 v_1 := v.Args[1] 3369 if v_1.Op != OpMIPSMOVWconst { 3370 break 3371 } 3372 if v_1.AuxInt != 0 { 3373 break 3374 } 3375 mem := v.Args[2] 3376 v.reset(OpMIPSLoweredAtomicStorezero) 3377 v.AddArg(ptr) 3378 v.AddArg(mem) 3379 return true 3380 } 3381 return false 3382 } 3383 func rewriteValueMIPS_OpMIPSMOVBUload_0(v *Value) bool { 3384 // match: (MOVBUload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3385 // cond: (is16Bit(off1+off2) || x.Uses == 1) 3386 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3387 for { 3388 off1 := v.AuxInt 3389 sym := v.Aux 3390 _ = v.Args[1] 3391 x := v.Args[0] 3392 if x.Op != OpMIPSADDconst { 3393 break 3394 } 3395 off2 := x.AuxInt 3396 ptr := x.Args[0] 3397 mem := v.Args[1] 3398 if !(is16Bit(off1+off2) || x.Uses == 1) { 3399 break 3400 } 3401 v.reset(OpMIPSMOVBUload) 3402 v.AuxInt = off1 + off2 3403 v.Aux = sym 3404 v.AddArg(ptr) 3405 v.AddArg(mem) 3406 return true 3407 } 3408 // match: (MOVBUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3409 // cond: canMergeSym(sym1,sym2) 3410 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3411 for { 3412 off1 := v.AuxInt 3413 sym1 := v.Aux 3414 _ = v.Args[1] 3415 v_0 := v.Args[0] 3416 if 
v_0.Op != OpMIPSMOVWaddr { 3417 break 3418 } 3419 off2 := v_0.AuxInt 3420 sym2 := v_0.Aux 3421 ptr := v_0.Args[0] 3422 mem := v.Args[1] 3423 if !(canMergeSym(sym1, sym2)) { 3424 break 3425 } 3426 v.reset(OpMIPSMOVBUload) 3427 v.AuxInt = off1 + off2 3428 v.Aux = mergeSym(sym1, sym2) 3429 v.AddArg(ptr) 3430 v.AddArg(mem) 3431 return true 3432 } 3433 // match: (MOVBUload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) 3434 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3435 // result: (MOVBUreg x) 3436 for { 3437 off := v.AuxInt 3438 sym := v.Aux 3439 _ = v.Args[1] 3440 ptr := v.Args[0] 3441 v_1 := v.Args[1] 3442 if v_1.Op != OpMIPSMOVBstore { 3443 break 3444 } 3445 off2 := v_1.AuxInt 3446 sym2 := v_1.Aux 3447 _ = v_1.Args[2] 3448 ptr2 := v_1.Args[0] 3449 x := v_1.Args[1] 3450 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3451 break 3452 } 3453 v.reset(OpMIPSMOVBUreg) 3454 v.AddArg(x) 3455 return true 3456 } 3457 return false 3458 } 3459 func rewriteValueMIPS_OpMIPSMOVBUreg_0(v *Value) bool { 3460 b := v.Block 3461 _ = b 3462 // match: (MOVBUreg x:(MOVBUload _ _)) 3463 // cond: 3464 // result: (MOVWreg x) 3465 for { 3466 x := v.Args[0] 3467 if x.Op != OpMIPSMOVBUload { 3468 break 3469 } 3470 _ = x.Args[1] 3471 v.reset(OpMIPSMOVWreg) 3472 v.AddArg(x) 3473 return true 3474 } 3475 // match: (MOVBUreg x:(MOVBUreg _)) 3476 // cond: 3477 // result: (MOVWreg x) 3478 for { 3479 x := v.Args[0] 3480 if x.Op != OpMIPSMOVBUreg { 3481 break 3482 } 3483 v.reset(OpMIPSMOVWreg) 3484 v.AddArg(x) 3485 return true 3486 } 3487 // match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem)) 3488 // cond: x.Uses == 1 && clobber(x) 3489 // result: @x.Block (MOVBUload <t> [off] {sym} ptr mem) 3490 for { 3491 t := v.Type 3492 x := v.Args[0] 3493 if x.Op != OpMIPSMOVBload { 3494 break 3495 } 3496 off := x.AuxInt 3497 sym := x.Aux 3498 _ = x.Args[1] 3499 ptr := x.Args[0] 3500 mem := x.Args[1] 3501 if !(x.Uses == 1 && clobber(x)) { 3502 break 3503 } 3504 b = x.Block 3505 
v0 := b.NewValue0(x.Pos, OpMIPSMOVBUload, t) 3506 v.reset(OpCopy) 3507 v.AddArg(v0) 3508 v0.AuxInt = off 3509 v0.Aux = sym 3510 v0.AddArg(ptr) 3511 v0.AddArg(mem) 3512 return true 3513 } 3514 // match: (MOVBUreg (ANDconst [c] x)) 3515 // cond: 3516 // result: (ANDconst [c&0xff] x) 3517 for { 3518 v_0 := v.Args[0] 3519 if v_0.Op != OpMIPSANDconst { 3520 break 3521 } 3522 c := v_0.AuxInt 3523 x := v_0.Args[0] 3524 v.reset(OpMIPSANDconst) 3525 v.AuxInt = c & 0xff 3526 v.AddArg(x) 3527 return true 3528 } 3529 // match: (MOVBUreg (MOVWconst [c])) 3530 // cond: 3531 // result: (MOVWconst [int64(uint8(c))]) 3532 for { 3533 v_0 := v.Args[0] 3534 if v_0.Op != OpMIPSMOVWconst { 3535 break 3536 } 3537 c := v_0.AuxInt 3538 v.reset(OpMIPSMOVWconst) 3539 v.AuxInt = int64(uint8(c)) 3540 return true 3541 } 3542 return false 3543 } 3544 func rewriteValueMIPS_OpMIPSMOVBload_0(v *Value) bool { 3545 // match: (MOVBload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3546 // cond: (is16Bit(off1+off2) || x.Uses == 1) 3547 // result: (MOVBload [off1+off2] {sym} ptr mem) 3548 for { 3549 off1 := v.AuxInt 3550 sym := v.Aux 3551 _ = v.Args[1] 3552 x := v.Args[0] 3553 if x.Op != OpMIPSADDconst { 3554 break 3555 } 3556 off2 := x.AuxInt 3557 ptr := x.Args[0] 3558 mem := v.Args[1] 3559 if !(is16Bit(off1+off2) || x.Uses == 1) { 3560 break 3561 } 3562 v.reset(OpMIPSMOVBload) 3563 v.AuxInt = off1 + off2 3564 v.Aux = sym 3565 v.AddArg(ptr) 3566 v.AddArg(mem) 3567 return true 3568 } 3569 // match: (MOVBload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3570 // cond: canMergeSym(sym1,sym2) 3571 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3572 for { 3573 off1 := v.AuxInt 3574 sym1 := v.Aux 3575 _ = v.Args[1] 3576 v_0 := v.Args[0] 3577 if v_0.Op != OpMIPSMOVWaddr { 3578 break 3579 } 3580 off2 := v_0.AuxInt 3581 sym2 := v_0.Aux 3582 ptr := v_0.Args[0] 3583 mem := v.Args[1] 3584 if !(canMergeSym(sym1, sym2)) { 3585 break 3586 } 3587 v.reset(OpMIPSMOVBload) 3588 v.AuxInt = off1 + off2 3589 
v.Aux = mergeSym(sym1, sym2) 3590 v.AddArg(ptr) 3591 v.AddArg(mem) 3592 return true 3593 } 3594 // match: (MOVBload [off] {sym} ptr (MOVBstore [off2] {sym2} ptr2 x _)) 3595 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 3596 // result: (MOVBreg x) 3597 for { 3598 off := v.AuxInt 3599 sym := v.Aux 3600 _ = v.Args[1] 3601 ptr := v.Args[0] 3602 v_1 := v.Args[1] 3603 if v_1.Op != OpMIPSMOVBstore { 3604 break 3605 } 3606 off2 := v_1.AuxInt 3607 sym2 := v_1.Aux 3608 _ = v_1.Args[2] 3609 ptr2 := v_1.Args[0] 3610 x := v_1.Args[1] 3611 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 3612 break 3613 } 3614 v.reset(OpMIPSMOVBreg) 3615 v.AddArg(x) 3616 return true 3617 } 3618 return false 3619 } 3620 func rewriteValueMIPS_OpMIPSMOVBreg_0(v *Value) bool { 3621 b := v.Block 3622 _ = b 3623 // match: (MOVBreg x:(MOVBload _ _)) 3624 // cond: 3625 // result: (MOVWreg x) 3626 for { 3627 x := v.Args[0] 3628 if x.Op != OpMIPSMOVBload { 3629 break 3630 } 3631 _ = x.Args[1] 3632 v.reset(OpMIPSMOVWreg) 3633 v.AddArg(x) 3634 return true 3635 } 3636 // match: (MOVBreg x:(MOVBreg _)) 3637 // cond: 3638 // result: (MOVWreg x) 3639 for { 3640 x := v.Args[0] 3641 if x.Op != OpMIPSMOVBreg { 3642 break 3643 } 3644 v.reset(OpMIPSMOVWreg) 3645 v.AddArg(x) 3646 return true 3647 } 3648 // match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem)) 3649 // cond: x.Uses == 1 && clobber(x) 3650 // result: @x.Block (MOVBload <t> [off] {sym} ptr mem) 3651 for { 3652 t := v.Type 3653 x := v.Args[0] 3654 if x.Op != OpMIPSMOVBUload { 3655 break 3656 } 3657 off := x.AuxInt 3658 sym := x.Aux 3659 _ = x.Args[1] 3660 ptr := x.Args[0] 3661 mem := x.Args[1] 3662 if !(x.Uses == 1 && clobber(x)) { 3663 break 3664 } 3665 b = x.Block 3666 v0 := b.NewValue0(x.Pos, OpMIPSMOVBload, t) 3667 v.reset(OpCopy) 3668 v.AddArg(v0) 3669 v0.AuxInt = off 3670 v0.Aux = sym 3671 v0.AddArg(ptr) 3672 v0.AddArg(mem) 3673 return true 3674 } 3675 // match: (MOVBreg (ANDconst [c] x)) 3676 // cond: c & 0x80 == 0 3677 // 
result: (ANDconst [c&0x7f] x) 3678 for { 3679 v_0 := v.Args[0] 3680 if v_0.Op != OpMIPSANDconst { 3681 break 3682 } 3683 c := v_0.AuxInt 3684 x := v_0.Args[0] 3685 if !(c&0x80 == 0) { 3686 break 3687 } 3688 v.reset(OpMIPSANDconst) 3689 v.AuxInt = c & 0x7f 3690 v.AddArg(x) 3691 return true 3692 } 3693 // match: (MOVBreg (MOVWconst [c])) 3694 // cond: 3695 // result: (MOVWconst [int64(int8(c))]) 3696 for { 3697 v_0 := v.Args[0] 3698 if v_0.Op != OpMIPSMOVWconst { 3699 break 3700 } 3701 c := v_0.AuxInt 3702 v.reset(OpMIPSMOVWconst) 3703 v.AuxInt = int64(int8(c)) 3704 return true 3705 } 3706 return false 3707 } 3708 func rewriteValueMIPS_OpMIPSMOVBstore_0(v *Value) bool { 3709 // match: (MOVBstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 3710 // cond: (is16Bit(off1+off2) || x.Uses == 1) 3711 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3712 for { 3713 off1 := v.AuxInt 3714 sym := v.Aux 3715 _ = v.Args[2] 3716 x := v.Args[0] 3717 if x.Op != OpMIPSADDconst { 3718 break 3719 } 3720 off2 := x.AuxInt 3721 ptr := x.Args[0] 3722 val := v.Args[1] 3723 mem := v.Args[2] 3724 if !(is16Bit(off1+off2) || x.Uses == 1) { 3725 break 3726 } 3727 v.reset(OpMIPSMOVBstore) 3728 v.AuxInt = off1 + off2 3729 v.Aux = sym 3730 v.AddArg(ptr) 3731 v.AddArg(val) 3732 v.AddArg(mem) 3733 return true 3734 } 3735 // match: (MOVBstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 3736 // cond: canMergeSym(sym1,sym2) 3737 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3738 for { 3739 off1 := v.AuxInt 3740 sym1 := v.Aux 3741 _ = v.Args[2] 3742 v_0 := v.Args[0] 3743 if v_0.Op != OpMIPSMOVWaddr { 3744 break 3745 } 3746 off2 := v_0.AuxInt 3747 sym2 := v_0.Aux 3748 ptr := v_0.Args[0] 3749 val := v.Args[1] 3750 mem := v.Args[2] 3751 if !(canMergeSym(sym1, sym2)) { 3752 break 3753 } 3754 v.reset(OpMIPSMOVBstore) 3755 v.AuxInt = off1 + off2 3756 v.Aux = mergeSym(sym1, sym2) 3757 v.AddArg(ptr) 3758 v.AddArg(val) 3759 v.AddArg(mem) 3760 return true 3761 } 3762 // match: 
(MOVBstore [off] {sym} ptr (MOVWconst [0]) mem) 3763 // cond: 3764 // result: (MOVBstorezero [off] {sym} ptr mem) 3765 for { 3766 off := v.AuxInt 3767 sym := v.Aux 3768 _ = v.Args[2] 3769 ptr := v.Args[0] 3770 v_1 := v.Args[1] 3771 if v_1.Op != OpMIPSMOVWconst { 3772 break 3773 } 3774 if v_1.AuxInt != 0 { 3775 break 3776 } 3777 mem := v.Args[2] 3778 v.reset(OpMIPSMOVBstorezero) 3779 v.AuxInt = off 3780 v.Aux = sym 3781 v.AddArg(ptr) 3782 v.AddArg(mem) 3783 return true 3784 } 3785 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 3786 // cond: 3787 // result: (MOVBstore [off] {sym} ptr x mem) 3788 for { 3789 off := v.AuxInt 3790 sym := v.Aux 3791 _ = v.Args[2] 3792 ptr := v.Args[0] 3793 v_1 := v.Args[1] 3794 if v_1.Op != OpMIPSMOVBreg { 3795 break 3796 } 3797 x := v_1.Args[0] 3798 mem := v.Args[2] 3799 v.reset(OpMIPSMOVBstore) 3800 v.AuxInt = off 3801 v.Aux = sym 3802 v.AddArg(ptr) 3803 v.AddArg(x) 3804 v.AddArg(mem) 3805 return true 3806 } 3807 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 3808 // cond: 3809 // result: (MOVBstore [off] {sym} ptr x mem) 3810 for { 3811 off := v.AuxInt 3812 sym := v.Aux 3813 _ = v.Args[2] 3814 ptr := v.Args[0] 3815 v_1 := v.Args[1] 3816 if v_1.Op != OpMIPSMOVBUreg { 3817 break 3818 } 3819 x := v_1.Args[0] 3820 mem := v.Args[2] 3821 v.reset(OpMIPSMOVBstore) 3822 v.AuxInt = off 3823 v.Aux = sym 3824 v.AddArg(ptr) 3825 v.AddArg(x) 3826 v.AddArg(mem) 3827 return true 3828 } 3829 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 3830 // cond: 3831 // result: (MOVBstore [off] {sym} ptr x mem) 3832 for { 3833 off := v.AuxInt 3834 sym := v.Aux 3835 _ = v.Args[2] 3836 ptr := v.Args[0] 3837 v_1 := v.Args[1] 3838 if v_1.Op != OpMIPSMOVHreg { 3839 break 3840 } 3841 x := v_1.Args[0] 3842 mem := v.Args[2] 3843 v.reset(OpMIPSMOVBstore) 3844 v.AuxInt = off 3845 v.Aux = sym 3846 v.AddArg(ptr) 3847 v.AddArg(x) 3848 v.AddArg(mem) 3849 return true 3850 } 3851 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 3852 // cond: 3853 // 
result: (MOVBstore [off] {sym} ptr x mem) 3854 for { 3855 off := v.AuxInt 3856 sym := v.Aux 3857 _ = v.Args[2] 3858 ptr := v.Args[0] 3859 v_1 := v.Args[1] 3860 if v_1.Op != OpMIPSMOVHUreg { 3861 break 3862 } 3863 x := v_1.Args[0] 3864 mem := v.Args[2] 3865 v.reset(OpMIPSMOVBstore) 3866 v.AuxInt = off 3867 v.Aux = sym 3868 v.AddArg(ptr) 3869 v.AddArg(x) 3870 v.AddArg(mem) 3871 return true 3872 } 3873 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 3874 // cond: 3875 // result: (MOVBstore [off] {sym} ptr x mem) 3876 for { 3877 off := v.AuxInt 3878 sym := v.Aux 3879 _ = v.Args[2] 3880 ptr := v.Args[0] 3881 v_1 := v.Args[1] 3882 if v_1.Op != OpMIPSMOVWreg { 3883 break 3884 } 3885 x := v_1.Args[0] 3886 mem := v.Args[2] 3887 v.reset(OpMIPSMOVBstore) 3888 v.AuxInt = off 3889 v.Aux = sym 3890 v.AddArg(ptr) 3891 v.AddArg(x) 3892 v.AddArg(mem) 3893 return true 3894 } 3895 return false 3896 } 3897 func rewriteValueMIPS_OpMIPSMOVBstorezero_0(v *Value) bool { 3898 // match: (MOVBstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem) 3899 // cond: (is16Bit(off1+off2) || x.Uses == 1) 3900 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 3901 for { 3902 off1 := v.AuxInt 3903 sym := v.Aux 3904 _ = v.Args[1] 3905 x := v.Args[0] 3906 if x.Op != OpMIPSADDconst { 3907 break 3908 } 3909 off2 := x.AuxInt 3910 ptr := x.Args[0] 3911 mem := v.Args[1] 3912 if !(is16Bit(off1+off2) || x.Uses == 1) { 3913 break 3914 } 3915 v.reset(OpMIPSMOVBstorezero) 3916 v.AuxInt = off1 + off2 3917 v.Aux = sym 3918 v.AddArg(ptr) 3919 v.AddArg(mem) 3920 return true 3921 } 3922 // match: (MOVBstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3923 // cond: canMergeSym(sym1,sym2) 3924 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3925 for { 3926 off1 := v.AuxInt 3927 sym1 := v.Aux 3928 _ = v.Args[1] 3929 v_0 := v.Args[0] 3930 if v_0.Op != OpMIPSMOVWaddr { 3931 break 3932 } 3933 off2 := v_0.AuxInt 3934 sym2 := v_0.Aux 3935 ptr := v_0.Args[0] 3936 mem := v.Args[1] 3937 if 
!(canMergeSym(sym1, sym2)) { 3938 break 3939 } 3940 v.reset(OpMIPSMOVBstorezero) 3941 v.AuxInt = off1 + off2 3942 v.Aux = mergeSym(sym1, sym2) 3943 v.AddArg(ptr) 3944 v.AddArg(mem) 3945 return true 3946 } 3947 return false 3948 } 3949 func rewriteValueMIPS_OpMIPSMOVDload_0(v *Value) bool { 3950 // match: (MOVDload [off1] {sym} x:(ADDconst [off2] ptr) mem) 3951 // cond: (is16Bit(off1+off2) || x.Uses == 1) 3952 // result: (MOVDload [off1+off2] {sym} ptr mem) 3953 for { 3954 off1 := v.AuxInt 3955 sym := v.Aux 3956 _ = v.Args[1] 3957 x := v.Args[0] 3958 if x.Op != OpMIPSADDconst { 3959 break 3960 } 3961 off2 := x.AuxInt 3962 ptr := x.Args[0] 3963 mem := v.Args[1] 3964 if !(is16Bit(off1+off2) || x.Uses == 1) { 3965 break 3966 } 3967 v.reset(OpMIPSMOVDload) 3968 v.AuxInt = off1 + off2 3969 v.Aux = sym 3970 v.AddArg(ptr) 3971 v.AddArg(mem) 3972 return true 3973 } 3974 // match: (MOVDload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 3975 // cond: canMergeSym(sym1,sym2) 3976 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3977 for { 3978 off1 := v.AuxInt 3979 sym1 := v.Aux 3980 _ = v.Args[1] 3981 v_0 := v.Args[0] 3982 if v_0.Op != OpMIPSMOVWaddr { 3983 break 3984 } 3985 off2 := v_0.AuxInt 3986 sym2 := v_0.Aux 3987 ptr := v_0.Args[0] 3988 mem := v.Args[1] 3989 if !(canMergeSym(sym1, sym2)) { 3990 break 3991 } 3992 v.reset(OpMIPSMOVDload) 3993 v.AuxInt = off1 + off2 3994 v.Aux = mergeSym(sym1, sym2) 3995 v.AddArg(ptr) 3996 v.AddArg(mem) 3997 return true 3998 } 3999 // match: (MOVDload [off] {sym} ptr (MOVDstore [off2] {sym2} ptr2 x _)) 4000 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4001 // result: x 4002 for { 4003 off := v.AuxInt 4004 sym := v.Aux 4005 _ = v.Args[1] 4006 ptr := v.Args[0] 4007 v_1 := v.Args[1] 4008 if v_1.Op != OpMIPSMOVDstore { 4009 break 4010 } 4011 off2 := v_1.AuxInt 4012 sym2 := v_1.Aux 4013 _ = v_1.Args[2] 4014 ptr2 := v_1.Args[0] 4015 x := v_1.Args[1] 4016 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 
4017 break 4018 } 4019 v.reset(OpCopy) 4020 v.Type = x.Type 4021 v.AddArg(x) 4022 return true 4023 } 4024 return false 4025 } 4026 func rewriteValueMIPS_OpMIPSMOVDstore_0(v *Value) bool { 4027 // match: (MOVDstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 4028 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4029 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4030 for { 4031 off1 := v.AuxInt 4032 sym := v.Aux 4033 _ = v.Args[2] 4034 x := v.Args[0] 4035 if x.Op != OpMIPSADDconst { 4036 break 4037 } 4038 off2 := x.AuxInt 4039 ptr := x.Args[0] 4040 val := v.Args[1] 4041 mem := v.Args[2] 4042 if !(is16Bit(off1+off2) || x.Uses == 1) { 4043 break 4044 } 4045 v.reset(OpMIPSMOVDstore) 4046 v.AuxInt = off1 + off2 4047 v.Aux = sym 4048 v.AddArg(ptr) 4049 v.AddArg(val) 4050 v.AddArg(mem) 4051 return true 4052 } 4053 // match: (MOVDstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 4054 // cond: canMergeSym(sym1,sym2) 4055 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4056 for { 4057 off1 := v.AuxInt 4058 sym1 := v.Aux 4059 _ = v.Args[2] 4060 v_0 := v.Args[0] 4061 if v_0.Op != OpMIPSMOVWaddr { 4062 break 4063 } 4064 off2 := v_0.AuxInt 4065 sym2 := v_0.Aux 4066 ptr := v_0.Args[0] 4067 val := v.Args[1] 4068 mem := v.Args[2] 4069 if !(canMergeSym(sym1, sym2)) { 4070 break 4071 } 4072 v.reset(OpMIPSMOVDstore) 4073 v.AuxInt = off1 + off2 4074 v.Aux = mergeSym(sym1, sym2) 4075 v.AddArg(ptr) 4076 v.AddArg(val) 4077 v.AddArg(mem) 4078 return true 4079 } 4080 return false 4081 } 4082 func rewriteValueMIPS_OpMIPSMOVFload_0(v *Value) bool { 4083 // match: (MOVFload [off1] {sym} x:(ADDconst [off2] ptr) mem) 4084 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4085 // result: (MOVFload [off1+off2] {sym} ptr mem) 4086 for { 4087 off1 := v.AuxInt 4088 sym := v.Aux 4089 _ = v.Args[1] 4090 x := v.Args[0] 4091 if x.Op != OpMIPSADDconst { 4092 break 4093 } 4094 off2 := x.AuxInt 4095 ptr := x.Args[0] 4096 mem := v.Args[1] 4097 if !(is16Bit(off1+off2) || x.Uses == 
1) { 4098 break 4099 } 4100 v.reset(OpMIPSMOVFload) 4101 v.AuxInt = off1 + off2 4102 v.Aux = sym 4103 v.AddArg(ptr) 4104 v.AddArg(mem) 4105 return true 4106 } 4107 // match: (MOVFload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 4108 // cond: canMergeSym(sym1,sym2) 4109 // result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4110 for { 4111 off1 := v.AuxInt 4112 sym1 := v.Aux 4113 _ = v.Args[1] 4114 v_0 := v.Args[0] 4115 if v_0.Op != OpMIPSMOVWaddr { 4116 break 4117 } 4118 off2 := v_0.AuxInt 4119 sym2 := v_0.Aux 4120 ptr := v_0.Args[0] 4121 mem := v.Args[1] 4122 if !(canMergeSym(sym1, sym2)) { 4123 break 4124 } 4125 v.reset(OpMIPSMOVFload) 4126 v.AuxInt = off1 + off2 4127 v.Aux = mergeSym(sym1, sym2) 4128 v.AddArg(ptr) 4129 v.AddArg(mem) 4130 return true 4131 } 4132 // match: (MOVFload [off] {sym} ptr (MOVFstore [off2] {sym2} ptr2 x _)) 4133 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4134 // result: x 4135 for { 4136 off := v.AuxInt 4137 sym := v.Aux 4138 _ = v.Args[1] 4139 ptr := v.Args[0] 4140 v_1 := v.Args[1] 4141 if v_1.Op != OpMIPSMOVFstore { 4142 break 4143 } 4144 off2 := v_1.AuxInt 4145 sym2 := v_1.Aux 4146 _ = v_1.Args[2] 4147 ptr2 := v_1.Args[0] 4148 x := v_1.Args[1] 4149 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4150 break 4151 } 4152 v.reset(OpCopy) 4153 v.Type = x.Type 4154 v.AddArg(x) 4155 return true 4156 } 4157 return false 4158 } 4159 func rewriteValueMIPS_OpMIPSMOVFstore_0(v *Value) bool { 4160 // match: (MOVFstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 4161 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4162 // result: (MOVFstore [off1+off2] {sym} ptr val mem) 4163 for { 4164 off1 := v.AuxInt 4165 sym := v.Aux 4166 _ = v.Args[2] 4167 x := v.Args[0] 4168 if x.Op != OpMIPSADDconst { 4169 break 4170 } 4171 off2 := x.AuxInt 4172 ptr := x.Args[0] 4173 val := v.Args[1] 4174 mem := v.Args[2] 4175 if !(is16Bit(off1+off2) || x.Uses == 1) { 4176 break 4177 } 4178 v.reset(OpMIPSMOVFstore) 4179 v.AuxInt = off1 
+ off2 4180 v.Aux = sym 4181 v.AddArg(ptr) 4182 v.AddArg(val) 4183 v.AddArg(mem) 4184 return true 4185 } 4186 // match: (MOVFstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 4187 // cond: canMergeSym(sym1,sym2) 4188 // result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4189 for { 4190 off1 := v.AuxInt 4191 sym1 := v.Aux 4192 _ = v.Args[2] 4193 v_0 := v.Args[0] 4194 if v_0.Op != OpMIPSMOVWaddr { 4195 break 4196 } 4197 off2 := v_0.AuxInt 4198 sym2 := v_0.Aux 4199 ptr := v_0.Args[0] 4200 val := v.Args[1] 4201 mem := v.Args[2] 4202 if !(canMergeSym(sym1, sym2)) { 4203 break 4204 } 4205 v.reset(OpMIPSMOVFstore) 4206 v.AuxInt = off1 + off2 4207 v.Aux = mergeSym(sym1, sym2) 4208 v.AddArg(ptr) 4209 v.AddArg(val) 4210 v.AddArg(mem) 4211 return true 4212 } 4213 return false 4214 } 4215 func rewriteValueMIPS_OpMIPSMOVHUload_0(v *Value) bool { 4216 // match: (MOVHUload [off1] {sym} x:(ADDconst [off2] ptr) mem) 4217 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4218 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4219 for { 4220 off1 := v.AuxInt 4221 sym := v.Aux 4222 _ = v.Args[1] 4223 x := v.Args[0] 4224 if x.Op != OpMIPSADDconst { 4225 break 4226 } 4227 off2 := x.AuxInt 4228 ptr := x.Args[0] 4229 mem := v.Args[1] 4230 if !(is16Bit(off1+off2) || x.Uses == 1) { 4231 break 4232 } 4233 v.reset(OpMIPSMOVHUload) 4234 v.AuxInt = off1 + off2 4235 v.Aux = sym 4236 v.AddArg(ptr) 4237 v.AddArg(mem) 4238 return true 4239 } 4240 // match: (MOVHUload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 4241 // cond: canMergeSym(sym1,sym2) 4242 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4243 for { 4244 off1 := v.AuxInt 4245 sym1 := v.Aux 4246 _ = v.Args[1] 4247 v_0 := v.Args[0] 4248 if v_0.Op != OpMIPSMOVWaddr { 4249 break 4250 } 4251 off2 := v_0.AuxInt 4252 sym2 := v_0.Aux 4253 ptr := v_0.Args[0] 4254 mem := v.Args[1] 4255 if !(canMergeSym(sym1, sym2)) { 4256 break 4257 } 4258 v.reset(OpMIPSMOVHUload) 4259 v.AuxInt = off1 + off2 4260 v.Aux = 
mergeSym(sym1, sym2) 4261 v.AddArg(ptr) 4262 v.AddArg(mem) 4263 return true 4264 } 4265 // match: (MOVHUload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _)) 4266 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4267 // result: (MOVHUreg x) 4268 for { 4269 off := v.AuxInt 4270 sym := v.Aux 4271 _ = v.Args[1] 4272 ptr := v.Args[0] 4273 v_1 := v.Args[1] 4274 if v_1.Op != OpMIPSMOVHstore { 4275 break 4276 } 4277 off2 := v_1.AuxInt 4278 sym2 := v_1.Aux 4279 _ = v_1.Args[2] 4280 ptr2 := v_1.Args[0] 4281 x := v_1.Args[1] 4282 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4283 break 4284 } 4285 v.reset(OpMIPSMOVHUreg) 4286 v.AddArg(x) 4287 return true 4288 } 4289 return false 4290 } 4291 func rewriteValueMIPS_OpMIPSMOVHUreg_0(v *Value) bool { 4292 b := v.Block 4293 _ = b 4294 // match: (MOVHUreg x:(MOVBUload _ _)) 4295 // cond: 4296 // result: (MOVWreg x) 4297 for { 4298 x := v.Args[0] 4299 if x.Op != OpMIPSMOVBUload { 4300 break 4301 } 4302 _ = x.Args[1] 4303 v.reset(OpMIPSMOVWreg) 4304 v.AddArg(x) 4305 return true 4306 } 4307 // match: (MOVHUreg x:(MOVHUload _ _)) 4308 // cond: 4309 // result: (MOVWreg x) 4310 for { 4311 x := v.Args[0] 4312 if x.Op != OpMIPSMOVHUload { 4313 break 4314 } 4315 _ = x.Args[1] 4316 v.reset(OpMIPSMOVWreg) 4317 v.AddArg(x) 4318 return true 4319 } 4320 // match: (MOVHUreg x:(MOVBUreg _)) 4321 // cond: 4322 // result: (MOVWreg x) 4323 for { 4324 x := v.Args[0] 4325 if x.Op != OpMIPSMOVBUreg { 4326 break 4327 } 4328 v.reset(OpMIPSMOVWreg) 4329 v.AddArg(x) 4330 return true 4331 } 4332 // match: (MOVHUreg x:(MOVHUreg _)) 4333 // cond: 4334 // result: (MOVWreg x) 4335 for { 4336 x := v.Args[0] 4337 if x.Op != OpMIPSMOVHUreg { 4338 break 4339 } 4340 v.reset(OpMIPSMOVWreg) 4341 v.AddArg(x) 4342 return true 4343 } 4344 // match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem)) 4345 // cond: x.Uses == 1 && clobber(x) 4346 // result: @x.Block (MOVHUload <t> [off] {sym} ptr mem) 4347 for { 4348 t := v.Type 4349 x := v.Args[0] 
4350 if x.Op != OpMIPSMOVHload { 4351 break 4352 } 4353 off := x.AuxInt 4354 sym := x.Aux 4355 _ = x.Args[1] 4356 ptr := x.Args[0] 4357 mem := x.Args[1] 4358 if !(x.Uses == 1 && clobber(x)) { 4359 break 4360 } 4361 b = x.Block 4362 v0 := b.NewValue0(x.Pos, OpMIPSMOVHUload, t) 4363 v.reset(OpCopy) 4364 v.AddArg(v0) 4365 v0.AuxInt = off 4366 v0.Aux = sym 4367 v0.AddArg(ptr) 4368 v0.AddArg(mem) 4369 return true 4370 } 4371 // match: (MOVHUreg (ANDconst [c] x)) 4372 // cond: 4373 // result: (ANDconst [c&0xffff] x) 4374 for { 4375 v_0 := v.Args[0] 4376 if v_0.Op != OpMIPSANDconst { 4377 break 4378 } 4379 c := v_0.AuxInt 4380 x := v_0.Args[0] 4381 v.reset(OpMIPSANDconst) 4382 v.AuxInt = c & 0xffff 4383 v.AddArg(x) 4384 return true 4385 } 4386 // match: (MOVHUreg (MOVWconst [c])) 4387 // cond: 4388 // result: (MOVWconst [int64(uint16(c))]) 4389 for { 4390 v_0 := v.Args[0] 4391 if v_0.Op != OpMIPSMOVWconst { 4392 break 4393 } 4394 c := v_0.AuxInt 4395 v.reset(OpMIPSMOVWconst) 4396 v.AuxInt = int64(uint16(c)) 4397 return true 4398 } 4399 return false 4400 } 4401 func rewriteValueMIPS_OpMIPSMOVHload_0(v *Value) bool { 4402 // match: (MOVHload [off1] {sym} x:(ADDconst [off2] ptr) mem) 4403 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4404 // result: (MOVHload [off1+off2] {sym} ptr mem) 4405 for { 4406 off1 := v.AuxInt 4407 sym := v.Aux 4408 _ = v.Args[1] 4409 x := v.Args[0] 4410 if x.Op != OpMIPSADDconst { 4411 break 4412 } 4413 off2 := x.AuxInt 4414 ptr := x.Args[0] 4415 mem := v.Args[1] 4416 if !(is16Bit(off1+off2) || x.Uses == 1) { 4417 break 4418 } 4419 v.reset(OpMIPSMOVHload) 4420 v.AuxInt = off1 + off2 4421 v.Aux = sym 4422 v.AddArg(ptr) 4423 v.AddArg(mem) 4424 return true 4425 } 4426 // match: (MOVHload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 4427 // cond: canMergeSym(sym1,sym2) 4428 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4429 for { 4430 off1 := v.AuxInt 4431 sym1 := v.Aux 4432 _ = v.Args[1] 4433 v_0 := v.Args[0] 4434 if v_0.Op != 
OpMIPSMOVWaddr { 4435 break 4436 } 4437 off2 := v_0.AuxInt 4438 sym2 := v_0.Aux 4439 ptr := v_0.Args[0] 4440 mem := v.Args[1] 4441 if !(canMergeSym(sym1, sym2)) { 4442 break 4443 } 4444 v.reset(OpMIPSMOVHload) 4445 v.AuxInt = off1 + off2 4446 v.Aux = mergeSym(sym1, sym2) 4447 v.AddArg(ptr) 4448 v.AddArg(mem) 4449 return true 4450 } 4451 // match: (MOVHload [off] {sym} ptr (MOVHstore [off2] {sym2} ptr2 x _)) 4452 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4453 // result: (MOVHreg x) 4454 for { 4455 off := v.AuxInt 4456 sym := v.Aux 4457 _ = v.Args[1] 4458 ptr := v.Args[0] 4459 v_1 := v.Args[1] 4460 if v_1.Op != OpMIPSMOVHstore { 4461 break 4462 } 4463 off2 := v_1.AuxInt 4464 sym2 := v_1.Aux 4465 _ = v_1.Args[2] 4466 ptr2 := v_1.Args[0] 4467 x := v_1.Args[1] 4468 if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) { 4469 break 4470 } 4471 v.reset(OpMIPSMOVHreg) 4472 v.AddArg(x) 4473 return true 4474 } 4475 return false 4476 } 4477 func rewriteValueMIPS_OpMIPSMOVHreg_0(v *Value) bool { 4478 b := v.Block 4479 _ = b 4480 // match: (MOVHreg x:(MOVBload _ _)) 4481 // cond: 4482 // result: (MOVWreg x) 4483 for { 4484 x := v.Args[0] 4485 if x.Op != OpMIPSMOVBload { 4486 break 4487 } 4488 _ = x.Args[1] 4489 v.reset(OpMIPSMOVWreg) 4490 v.AddArg(x) 4491 return true 4492 } 4493 // match: (MOVHreg x:(MOVBUload _ _)) 4494 // cond: 4495 // result: (MOVWreg x) 4496 for { 4497 x := v.Args[0] 4498 if x.Op != OpMIPSMOVBUload { 4499 break 4500 } 4501 _ = x.Args[1] 4502 v.reset(OpMIPSMOVWreg) 4503 v.AddArg(x) 4504 return true 4505 } 4506 // match: (MOVHreg x:(MOVHload _ _)) 4507 // cond: 4508 // result: (MOVWreg x) 4509 for { 4510 x := v.Args[0] 4511 if x.Op != OpMIPSMOVHload { 4512 break 4513 } 4514 _ = x.Args[1] 4515 v.reset(OpMIPSMOVWreg) 4516 v.AddArg(x) 4517 return true 4518 } 4519 // match: (MOVHreg x:(MOVBreg _)) 4520 // cond: 4521 // result: (MOVWreg x) 4522 for { 4523 x := v.Args[0] 4524 if x.Op != OpMIPSMOVBreg { 4525 break 4526 } 4527 
v.reset(OpMIPSMOVWreg) 4528 v.AddArg(x) 4529 return true 4530 } 4531 // match: (MOVHreg x:(MOVBUreg _)) 4532 // cond: 4533 // result: (MOVWreg x) 4534 for { 4535 x := v.Args[0] 4536 if x.Op != OpMIPSMOVBUreg { 4537 break 4538 } 4539 v.reset(OpMIPSMOVWreg) 4540 v.AddArg(x) 4541 return true 4542 } 4543 // match: (MOVHreg x:(MOVHreg _)) 4544 // cond: 4545 // result: (MOVWreg x) 4546 for { 4547 x := v.Args[0] 4548 if x.Op != OpMIPSMOVHreg { 4549 break 4550 } 4551 v.reset(OpMIPSMOVWreg) 4552 v.AddArg(x) 4553 return true 4554 } 4555 // match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem)) 4556 // cond: x.Uses == 1 && clobber(x) 4557 // result: @x.Block (MOVHload <t> [off] {sym} ptr mem) 4558 for { 4559 t := v.Type 4560 x := v.Args[0] 4561 if x.Op != OpMIPSMOVHUload { 4562 break 4563 } 4564 off := x.AuxInt 4565 sym := x.Aux 4566 _ = x.Args[1] 4567 ptr := x.Args[0] 4568 mem := x.Args[1] 4569 if !(x.Uses == 1 && clobber(x)) { 4570 break 4571 } 4572 b = x.Block 4573 v0 := b.NewValue0(x.Pos, OpMIPSMOVHload, t) 4574 v.reset(OpCopy) 4575 v.AddArg(v0) 4576 v0.AuxInt = off 4577 v0.Aux = sym 4578 v0.AddArg(ptr) 4579 v0.AddArg(mem) 4580 return true 4581 } 4582 // match: (MOVHreg (ANDconst [c] x)) 4583 // cond: c & 0x8000 == 0 4584 // result: (ANDconst [c&0x7fff] x) 4585 for { 4586 v_0 := v.Args[0] 4587 if v_0.Op != OpMIPSANDconst { 4588 break 4589 } 4590 c := v_0.AuxInt 4591 x := v_0.Args[0] 4592 if !(c&0x8000 == 0) { 4593 break 4594 } 4595 v.reset(OpMIPSANDconst) 4596 v.AuxInt = c & 0x7fff 4597 v.AddArg(x) 4598 return true 4599 } 4600 // match: (MOVHreg (MOVWconst [c])) 4601 // cond: 4602 // result: (MOVWconst [int64(int16(c))]) 4603 for { 4604 v_0 := v.Args[0] 4605 if v_0.Op != OpMIPSMOVWconst { 4606 break 4607 } 4608 c := v_0.AuxInt 4609 v.reset(OpMIPSMOVWconst) 4610 v.AuxInt = int64(int16(c)) 4611 return true 4612 } 4613 return false 4614 } 4615 func rewriteValueMIPS_OpMIPSMOVHstore_0(v *Value) bool { 4616 // match: (MOVHstore [off1] {sym} x:(ADDconst [off2] ptr) val mem) 4617 
// cond: (is16Bit(off1+off2) || x.Uses == 1) 4618 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4619 for { 4620 off1 := v.AuxInt 4621 sym := v.Aux 4622 _ = v.Args[2] 4623 x := v.Args[0] 4624 if x.Op != OpMIPSADDconst { 4625 break 4626 } 4627 off2 := x.AuxInt 4628 ptr := x.Args[0] 4629 val := v.Args[1] 4630 mem := v.Args[2] 4631 if !(is16Bit(off1+off2) || x.Uses == 1) { 4632 break 4633 } 4634 v.reset(OpMIPSMOVHstore) 4635 v.AuxInt = off1 + off2 4636 v.Aux = sym 4637 v.AddArg(ptr) 4638 v.AddArg(val) 4639 v.AddArg(mem) 4640 return true 4641 } 4642 // match: (MOVHstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem) 4643 // cond: canMergeSym(sym1,sym2) 4644 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4645 for { 4646 off1 := v.AuxInt 4647 sym1 := v.Aux 4648 _ = v.Args[2] 4649 v_0 := v.Args[0] 4650 if v_0.Op != OpMIPSMOVWaddr { 4651 break 4652 } 4653 off2 := v_0.AuxInt 4654 sym2 := v_0.Aux 4655 ptr := v_0.Args[0] 4656 val := v.Args[1] 4657 mem := v.Args[2] 4658 if !(canMergeSym(sym1, sym2)) { 4659 break 4660 } 4661 v.reset(OpMIPSMOVHstore) 4662 v.AuxInt = off1 + off2 4663 v.Aux = mergeSym(sym1, sym2) 4664 v.AddArg(ptr) 4665 v.AddArg(val) 4666 v.AddArg(mem) 4667 return true 4668 } 4669 // match: (MOVHstore [off] {sym} ptr (MOVWconst [0]) mem) 4670 // cond: 4671 // result: (MOVHstorezero [off] {sym} ptr mem) 4672 for { 4673 off := v.AuxInt 4674 sym := v.Aux 4675 _ = v.Args[2] 4676 ptr := v.Args[0] 4677 v_1 := v.Args[1] 4678 if v_1.Op != OpMIPSMOVWconst { 4679 break 4680 } 4681 if v_1.AuxInt != 0 { 4682 break 4683 } 4684 mem := v.Args[2] 4685 v.reset(OpMIPSMOVHstorezero) 4686 v.AuxInt = off 4687 v.Aux = sym 4688 v.AddArg(ptr) 4689 v.AddArg(mem) 4690 return true 4691 } 4692 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4693 // cond: 4694 // result: (MOVHstore [off] {sym} ptr x mem) 4695 for { 4696 off := v.AuxInt 4697 sym := v.Aux 4698 _ = v.Args[2] 4699 ptr := v.Args[0] 4700 v_1 := v.Args[1] 4701 if v_1.Op != OpMIPSMOVHreg { 4702 
break 4703 } 4704 x := v_1.Args[0] 4705 mem := v.Args[2] 4706 v.reset(OpMIPSMOVHstore) 4707 v.AuxInt = off 4708 v.Aux = sym 4709 v.AddArg(ptr) 4710 v.AddArg(x) 4711 v.AddArg(mem) 4712 return true 4713 } 4714 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4715 // cond: 4716 // result: (MOVHstore [off] {sym} ptr x mem) 4717 for { 4718 off := v.AuxInt 4719 sym := v.Aux 4720 _ = v.Args[2] 4721 ptr := v.Args[0] 4722 v_1 := v.Args[1] 4723 if v_1.Op != OpMIPSMOVHUreg { 4724 break 4725 } 4726 x := v_1.Args[0] 4727 mem := v.Args[2] 4728 v.reset(OpMIPSMOVHstore) 4729 v.AuxInt = off 4730 v.Aux = sym 4731 v.AddArg(ptr) 4732 v.AddArg(x) 4733 v.AddArg(mem) 4734 return true 4735 } 4736 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4737 // cond: 4738 // result: (MOVHstore [off] {sym} ptr x mem) 4739 for { 4740 off := v.AuxInt 4741 sym := v.Aux 4742 _ = v.Args[2] 4743 ptr := v.Args[0] 4744 v_1 := v.Args[1] 4745 if v_1.Op != OpMIPSMOVWreg { 4746 break 4747 } 4748 x := v_1.Args[0] 4749 mem := v.Args[2] 4750 v.reset(OpMIPSMOVHstore) 4751 v.AuxInt = off 4752 v.Aux = sym 4753 v.AddArg(ptr) 4754 v.AddArg(x) 4755 v.AddArg(mem) 4756 return true 4757 } 4758 return false 4759 } 4760 func rewriteValueMIPS_OpMIPSMOVHstorezero_0(v *Value) bool { 4761 // match: (MOVHstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem) 4762 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4763 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4764 for { 4765 off1 := v.AuxInt 4766 sym := v.Aux 4767 _ = v.Args[1] 4768 x := v.Args[0] 4769 if x.Op != OpMIPSADDconst { 4770 break 4771 } 4772 off2 := x.AuxInt 4773 ptr := x.Args[0] 4774 mem := v.Args[1] 4775 if !(is16Bit(off1+off2) || x.Uses == 1) { 4776 break 4777 } 4778 v.reset(OpMIPSMOVHstorezero) 4779 v.AuxInt = off1 + off2 4780 v.Aux = sym 4781 v.AddArg(ptr) 4782 v.AddArg(mem) 4783 return true 4784 } 4785 // match: (MOVHstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 4786 // cond: canMergeSym(sym1,sym2) 4787 // result: (MOVHstorezero 
[off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4788 for { 4789 off1 := v.AuxInt 4790 sym1 := v.Aux 4791 _ = v.Args[1] 4792 v_0 := v.Args[0] 4793 if v_0.Op != OpMIPSMOVWaddr { 4794 break 4795 } 4796 off2 := v_0.AuxInt 4797 sym2 := v_0.Aux 4798 ptr := v_0.Args[0] 4799 mem := v.Args[1] 4800 if !(canMergeSym(sym1, sym2)) { 4801 break 4802 } 4803 v.reset(OpMIPSMOVHstorezero) 4804 v.AuxInt = off1 + off2 4805 v.Aux = mergeSym(sym1, sym2) 4806 v.AddArg(ptr) 4807 v.AddArg(mem) 4808 return true 4809 } 4810 return false 4811 } 4812 func rewriteValueMIPS_OpMIPSMOVWload_0(v *Value) bool { 4813 // match: (MOVWload [off1] {sym} x:(ADDconst [off2] ptr) mem) 4814 // cond: (is16Bit(off1+off2) || x.Uses == 1) 4815 // result: (MOVWload [off1+off2] {sym} ptr mem) 4816 for { 4817 off1 := v.AuxInt 4818 sym := v.Aux 4819 _ = v.Args[1] 4820 x := v.Args[0] 4821 if x.Op != OpMIPSADDconst { 4822 break 4823 } 4824 off2 := x.AuxInt 4825 ptr := x.Args[0] 4826 mem := v.Args[1] 4827 if !(is16Bit(off1+off2) || x.Uses == 1) { 4828 break 4829 } 4830 v.reset(OpMIPSMOVWload) 4831 v.AuxInt = off1 + off2 4832 v.Aux = sym 4833 v.AddArg(ptr) 4834 v.AddArg(mem) 4835 return true 4836 } 4837 // match: (MOVWload [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 4838 // cond: canMergeSym(sym1,sym2) 4839 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4840 for { 4841 off1 := v.AuxInt 4842 sym1 := v.Aux 4843 _ = v.Args[1] 4844 v_0 := v.Args[0] 4845 if v_0.Op != OpMIPSMOVWaddr { 4846 break 4847 } 4848 off2 := v_0.AuxInt 4849 sym2 := v_0.Aux 4850 ptr := v_0.Args[0] 4851 mem := v.Args[1] 4852 if !(canMergeSym(sym1, sym2)) { 4853 break 4854 } 4855 v.reset(OpMIPSMOVWload) 4856 v.AuxInt = off1 + off2 4857 v.Aux = mergeSym(sym1, sym2) 4858 v.AddArg(ptr) 4859 v.AddArg(mem) 4860 return true 4861 } 4862 // match: (MOVWload [off] {sym} ptr (MOVWstore [off2] {sym2} ptr2 x _)) 4863 // cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2) 4864 // result: x 4865 for { 4866 off := v.AuxInt 4867 sym := v.Aux 4868 _ = 
v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWstore {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[2]
		ptr2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSMOVWreg_0 applies the generated rewrite rules for
// MOVWreg: a single-use MOVWreg degrades to MOVWnop, and MOVWreg of a constant
// folds to the constant. It returns true iff v was rewritten in place.
func rewriteValueMIPS_OpMIPSMOVWreg_0(v *Value) bool {
	// match: (MOVWreg x)
	// cond: x.Uses == 1
	// result: (MOVWnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSMOVWstore_0 applies the generated rewrite rules for
// MOVWstore shown in the match/cond/result comments below. It returns true
// iff v was rewritten in place.
func rewriteValueMIPS_OpMIPSMOVWstore_0(v *Value) bool {
	// match: (MOVWstore [off1] {sym} x:(ADDconst [off2] ptr) val mem)
	// cond: (is16Bit(off1+off2) || x.Uses == 1)
	// result: (MOVWstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		if x.Op != OpMIPSADDconst {
			break
		}
		off2 := x.AuxInt
		ptr := x.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is16Bit(off1+off2) || x.Uses == 1) {
			break
		}
		v.reset(OpMIPSMOVWstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVWstore [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWaddr {
4953 break 4954 } 4955 off2 := v_0.AuxInt 4956 sym2 := v_0.Aux 4957 ptr := v_0.Args[0] 4958 val := v.Args[1] 4959 mem := v.Args[2] 4960 if !(canMergeSym(sym1, sym2)) { 4961 break 4962 } 4963 v.reset(OpMIPSMOVWstore) 4964 v.AuxInt = off1 + off2 4965 v.Aux = mergeSym(sym1, sym2) 4966 v.AddArg(ptr) 4967 v.AddArg(val) 4968 v.AddArg(mem) 4969 return true 4970 } 4971 // match: (MOVWstore [off] {sym} ptr (MOVWconst [0]) mem) 4972 // cond: 4973 // result: (MOVWstorezero [off] {sym} ptr mem) 4974 for { 4975 off := v.AuxInt 4976 sym := v.Aux 4977 _ = v.Args[2] 4978 ptr := v.Args[0] 4979 v_1 := v.Args[1] 4980 if v_1.Op != OpMIPSMOVWconst { 4981 break 4982 } 4983 if v_1.AuxInt != 0 { 4984 break 4985 } 4986 mem := v.Args[2] 4987 v.reset(OpMIPSMOVWstorezero) 4988 v.AuxInt = off 4989 v.Aux = sym 4990 v.AddArg(ptr) 4991 v.AddArg(mem) 4992 return true 4993 } 4994 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 4995 // cond: 4996 // result: (MOVWstore [off] {sym} ptr x mem) 4997 for { 4998 off := v.AuxInt 4999 sym := v.Aux 5000 _ = v.Args[2] 5001 ptr := v.Args[0] 5002 v_1 := v.Args[1] 5003 if v_1.Op != OpMIPSMOVWreg { 5004 break 5005 } 5006 x := v_1.Args[0] 5007 mem := v.Args[2] 5008 v.reset(OpMIPSMOVWstore) 5009 v.AuxInt = off 5010 v.Aux = sym 5011 v.AddArg(ptr) 5012 v.AddArg(x) 5013 v.AddArg(mem) 5014 return true 5015 } 5016 return false 5017 } 5018 func rewriteValueMIPS_OpMIPSMOVWstorezero_0(v *Value) bool { 5019 // match: (MOVWstorezero [off1] {sym} x:(ADDconst [off2] ptr) mem) 5020 // cond: (is16Bit(off1+off2) || x.Uses == 1) 5021 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5022 for { 5023 off1 := v.AuxInt 5024 sym := v.Aux 5025 _ = v.Args[1] 5026 x := v.Args[0] 5027 if x.Op != OpMIPSADDconst { 5028 break 5029 } 5030 off2 := x.AuxInt 5031 ptr := x.Args[0] 5032 mem := v.Args[1] 5033 if !(is16Bit(off1+off2) || x.Uses == 1) { 5034 break 5035 } 5036 v.reset(OpMIPSMOVWstorezero) 5037 v.AuxInt = off1 + off2 5038 v.Aux = sym 5039 v.AddArg(ptr) 5040 v.AddArg(mem) 5041 
return true 5042 } 5043 // match: (MOVWstorezero [off1] {sym1} (MOVWaddr [off2] {sym2} ptr) mem) 5044 // cond: canMergeSym(sym1,sym2) 5045 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5046 for { 5047 off1 := v.AuxInt 5048 sym1 := v.Aux 5049 _ = v.Args[1] 5050 v_0 := v.Args[0] 5051 if v_0.Op != OpMIPSMOVWaddr { 5052 break 5053 } 5054 off2 := v_0.AuxInt 5055 sym2 := v_0.Aux 5056 ptr := v_0.Args[0] 5057 mem := v.Args[1] 5058 if !(canMergeSym(sym1, sym2)) { 5059 break 5060 } 5061 v.reset(OpMIPSMOVWstorezero) 5062 v.AuxInt = off1 + off2 5063 v.Aux = mergeSym(sym1, sym2) 5064 v.AddArg(ptr) 5065 v.AddArg(mem) 5066 return true 5067 } 5068 return false 5069 } 5070 func rewriteValueMIPS_OpMIPSMUL_0(v *Value) bool { 5071 // match: (MUL (MOVWconst [0]) _) 5072 // cond: 5073 // result: (MOVWconst [0]) 5074 for { 5075 _ = v.Args[1] 5076 v_0 := v.Args[0] 5077 if v_0.Op != OpMIPSMOVWconst { 5078 break 5079 } 5080 if v_0.AuxInt != 0 { 5081 break 5082 } 5083 v.reset(OpMIPSMOVWconst) 5084 v.AuxInt = 0 5085 return true 5086 } 5087 // match: (MUL _ (MOVWconst [0])) 5088 // cond: 5089 // result: (MOVWconst [0]) 5090 for { 5091 _ = v.Args[1] 5092 v_1 := v.Args[1] 5093 if v_1.Op != OpMIPSMOVWconst { 5094 break 5095 } 5096 if v_1.AuxInt != 0 { 5097 break 5098 } 5099 v.reset(OpMIPSMOVWconst) 5100 v.AuxInt = 0 5101 return true 5102 } 5103 // match: (MUL (MOVWconst [1]) x) 5104 // cond: 5105 // result: x 5106 for { 5107 _ = v.Args[1] 5108 v_0 := v.Args[0] 5109 if v_0.Op != OpMIPSMOVWconst { 5110 break 5111 } 5112 if v_0.AuxInt != 1 { 5113 break 5114 } 5115 x := v.Args[1] 5116 v.reset(OpCopy) 5117 v.Type = x.Type 5118 v.AddArg(x) 5119 return true 5120 } 5121 // match: (MUL x (MOVWconst [1])) 5122 // cond: 5123 // result: x 5124 for { 5125 _ = v.Args[1] 5126 x := v.Args[0] 5127 v_1 := v.Args[1] 5128 if v_1.Op != OpMIPSMOVWconst { 5129 break 5130 } 5131 if v_1.AuxInt != 1 { 5132 break 5133 } 5134 v.reset(OpCopy) 5135 v.Type = x.Type 5136 v.AddArg(x) 5137 return true 5138 
} 5139 // match: (MUL (MOVWconst [-1]) x) 5140 // cond: 5141 // result: (NEG x) 5142 for { 5143 _ = v.Args[1] 5144 v_0 := v.Args[0] 5145 if v_0.Op != OpMIPSMOVWconst { 5146 break 5147 } 5148 if v_0.AuxInt != -1 { 5149 break 5150 } 5151 x := v.Args[1] 5152 v.reset(OpMIPSNEG) 5153 v.AddArg(x) 5154 return true 5155 } 5156 // match: (MUL x (MOVWconst [-1])) 5157 // cond: 5158 // result: (NEG x) 5159 for { 5160 _ = v.Args[1] 5161 x := v.Args[0] 5162 v_1 := v.Args[1] 5163 if v_1.Op != OpMIPSMOVWconst { 5164 break 5165 } 5166 if v_1.AuxInt != -1 { 5167 break 5168 } 5169 v.reset(OpMIPSNEG) 5170 v.AddArg(x) 5171 return true 5172 } 5173 // match: (MUL (MOVWconst [c]) x) 5174 // cond: isPowerOfTwo(int64(uint32(c))) 5175 // result: (SLLconst [log2(int64(uint32(c)))] x) 5176 for { 5177 _ = v.Args[1] 5178 v_0 := v.Args[0] 5179 if v_0.Op != OpMIPSMOVWconst { 5180 break 5181 } 5182 c := v_0.AuxInt 5183 x := v.Args[1] 5184 if !(isPowerOfTwo(int64(uint32(c)))) { 5185 break 5186 } 5187 v.reset(OpMIPSSLLconst) 5188 v.AuxInt = log2(int64(uint32(c))) 5189 v.AddArg(x) 5190 return true 5191 } 5192 // match: (MUL x (MOVWconst [c])) 5193 // cond: isPowerOfTwo(int64(uint32(c))) 5194 // result: (SLLconst [log2(int64(uint32(c)))] x) 5195 for { 5196 _ = v.Args[1] 5197 x := v.Args[0] 5198 v_1 := v.Args[1] 5199 if v_1.Op != OpMIPSMOVWconst { 5200 break 5201 } 5202 c := v_1.AuxInt 5203 if !(isPowerOfTwo(int64(uint32(c)))) { 5204 break 5205 } 5206 v.reset(OpMIPSSLLconst) 5207 v.AuxInt = log2(int64(uint32(c))) 5208 v.AddArg(x) 5209 return true 5210 } 5211 // match: (MUL (MOVWconst [c]) (MOVWconst [d])) 5212 // cond: 5213 // result: (MOVWconst [int64(int32(c)*int32(d))]) 5214 for { 5215 _ = v.Args[1] 5216 v_0 := v.Args[0] 5217 if v_0.Op != OpMIPSMOVWconst { 5218 break 5219 } 5220 c := v_0.AuxInt 5221 v_1 := v.Args[1] 5222 if v_1.Op != OpMIPSMOVWconst { 5223 break 5224 } 5225 d := v_1.AuxInt 5226 v.reset(OpMIPSMOVWconst) 5227 v.AuxInt = int64(int32(c) * int32(d)) 5228 return true 5229 } 5230 // match: 
// match: (MUL (MOVWconst [d]) (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(int32(c)*int32(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int64(int32(c) * int32(d))
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSNEG_0 applies the generated rewrite rule for NEG:
// negation of a constant folds to the (32-bit wrapped) constant. It returns
// true iff v was rewritten in place.
func rewriteValueMIPS_OpMIPSNEG_0(v *Value) bool {
	// match: (NEG (MOVWconst [c]))
	// cond:
	// result: (MOVWconst [int64(int32(-c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = int64(int32(-c))
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSNOR_0 applies the generated rewrite rules for NOR:
// a constant operand (on either side) is folded into a NORconst. It returns
// true iff v was rewritten in place.
func rewriteValueMIPS_OpMIPSNOR_0(v *Value) bool {
	// match: (NOR x (MOVWconst [c]))
	// cond:
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpMIPSNORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (NOR (MOVWconst [c]) x)
	// cond:
	// result: (NORconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSNORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSNORconst_0 applies the generated rewrite rule for
// NORconst: NOR of two constants folds to ^(c|d). It returns true iff v was
// rewritten in place.
func rewriteValueMIPS_OpMIPSNORconst_0(v *Value) bool {
	// match: (NORconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [^(c|d)])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = ^(c | d)
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSOR_0 applies the generated rewrite rules for OR
// shown in the match/cond/result comments below. It returns true iff v was
// rewritten in place.
func rewriteValueMIPS_OpMIPSOR_0(v *Value) bool {
b := v.Block 5321 _ = b 5322 // match: (OR x (MOVWconst [c])) 5323 // cond: 5324 // result: (ORconst [c] x) 5325 for { 5326 _ = v.Args[1] 5327 x := v.Args[0] 5328 v_1 := v.Args[1] 5329 if v_1.Op != OpMIPSMOVWconst { 5330 break 5331 } 5332 c := v_1.AuxInt 5333 v.reset(OpMIPSORconst) 5334 v.AuxInt = c 5335 v.AddArg(x) 5336 return true 5337 } 5338 // match: (OR (MOVWconst [c]) x) 5339 // cond: 5340 // result: (ORconst [c] x) 5341 for { 5342 _ = v.Args[1] 5343 v_0 := v.Args[0] 5344 if v_0.Op != OpMIPSMOVWconst { 5345 break 5346 } 5347 c := v_0.AuxInt 5348 x := v.Args[1] 5349 v.reset(OpMIPSORconst) 5350 v.AuxInt = c 5351 v.AddArg(x) 5352 return true 5353 } 5354 // match: (OR x x) 5355 // cond: 5356 // result: x 5357 for { 5358 _ = v.Args[1] 5359 x := v.Args[0] 5360 if x != v.Args[1] { 5361 break 5362 } 5363 v.reset(OpCopy) 5364 v.Type = x.Type 5365 v.AddArg(x) 5366 return true 5367 } 5368 // match: (OR (SGTUzero x) (SGTUzero y)) 5369 // cond: 5370 // result: (SGTUzero (OR <x.Type> x y)) 5371 for { 5372 _ = v.Args[1] 5373 v_0 := v.Args[0] 5374 if v_0.Op != OpMIPSSGTUzero { 5375 break 5376 } 5377 x := v_0.Args[0] 5378 v_1 := v.Args[1] 5379 if v_1.Op != OpMIPSSGTUzero { 5380 break 5381 } 5382 y := v_1.Args[0] 5383 v.reset(OpMIPSSGTUzero) 5384 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 5385 v0.AddArg(x) 5386 v0.AddArg(y) 5387 v.AddArg(v0) 5388 return true 5389 } 5390 // match: (OR (SGTUzero y) (SGTUzero x)) 5391 // cond: 5392 // result: (SGTUzero (OR <x.Type> x y)) 5393 for { 5394 _ = v.Args[1] 5395 v_0 := v.Args[0] 5396 if v_0.Op != OpMIPSSGTUzero { 5397 break 5398 } 5399 y := v_0.Args[0] 5400 v_1 := v.Args[1] 5401 if v_1.Op != OpMIPSSGTUzero { 5402 break 5403 } 5404 x := v_1.Args[0] 5405 v.reset(OpMIPSSGTUzero) 5406 v0 := b.NewValue0(v.Pos, OpMIPSOR, x.Type) 5407 v0.AddArg(x) 5408 v0.AddArg(y) 5409 v.AddArg(v0) 5410 return true 5411 } 5412 return false 5413 } 5414 func rewriteValueMIPS_OpMIPSORconst_0(v *Value) bool { 5415 // match: (ORconst [0] x) 5416 // cond: 5417 // 
// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ORconst [-1] _)
	// cond:
	// result: (MOVWconst [-1])
	for {
		if v.AuxInt != -1 {
			break
		}
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = -1
		return true
	}
	// match: (ORconst [c] (MOVWconst [d]))
	// cond:
	// result: (MOVWconst [c|d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpMIPSMOVWconst)
		v.AuxInt = c | d
		return true
	}
	// match: (ORconst [c] (ORconst [d] x))
	// cond:
	// result: (ORconst [c|d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSORconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpMIPSORconst)
		v.AuxInt = c | d
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSSGT_0 applies the generated rewrite rules for SGT
// (signed set-greater-than): a constant left operand becomes SGTconst, and a
// zero right operand becomes SGTzero. It returns true iff v was rewritten in
// place.
func rewriteValueMIPS_OpMIPSSGT_0(v *Value) bool {
	// match: (SGT (MOVWconst [c]) x)
	// cond:
	// result: (SGTconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpMIPSSGTconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (SGT x (MOVWconst [0]))
	// cond:
	// result: (SGTzero x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMIPSMOVWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpMIPSSGTzero)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValueMIPS_OpMIPSSGTU_0 applies the generated rewrite rules for SGTU
// (unsigned set-greater-than): a constant left operand becomes SGTUconst, and
// a zero right operand becomes SGTUzero. It returns true iff v was rewritten
// in place.
func rewriteValueMIPS_OpMIPSSGTU_0(v *Value) bool {
	// match: (SGTU (MOVWconst [c]) x)
	// cond:
	// result: (SGTUconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMIPSMOVWconst {
			break
		}
5517 c := v_0.AuxInt 5518 x := v.Args[1] 5519 v.reset(OpMIPSSGTUconst) 5520 v.AuxInt = c 5521 v.AddArg(x) 5522 return true 5523 } 5524 // match: (SGTU x (MOVWconst [0])) 5525 // cond: 5526 // result: (SGTUzero x) 5527 for { 5528 _ = v.Args[1] 5529 x := v.Args[0] 5530 v_1 := v.Args[1] 5531 if v_1.Op != OpMIPSMOVWconst { 5532 break 5533 } 5534 if v_1.AuxInt != 0 { 5535 break 5536 } 5537 v.reset(OpMIPSSGTUzero) 5538 v.AddArg(x) 5539 return true 5540 } 5541 return false 5542 } 5543 func rewriteValueMIPS_OpMIPSSGTUconst_0(v *Value) bool { 5544 // match: (SGTUconst [c] (MOVWconst [d])) 5545 // cond: uint32(c)>uint32(d) 5546 // result: (MOVWconst [1]) 5547 for { 5548 c := v.AuxInt 5549 v_0 := v.Args[0] 5550 if v_0.Op != OpMIPSMOVWconst { 5551 break 5552 } 5553 d := v_0.AuxInt 5554 if !(uint32(c) > uint32(d)) { 5555 break 5556 } 5557 v.reset(OpMIPSMOVWconst) 5558 v.AuxInt = 1 5559 return true 5560 } 5561 // match: (SGTUconst [c] (MOVWconst [d])) 5562 // cond: uint32(c)<=uint32(d) 5563 // result: (MOVWconst [0]) 5564 for { 5565 c := v.AuxInt 5566 v_0 := v.Args[0] 5567 if v_0.Op != OpMIPSMOVWconst { 5568 break 5569 } 5570 d := v_0.AuxInt 5571 if !(uint32(c) <= uint32(d)) { 5572 break 5573 } 5574 v.reset(OpMIPSMOVWconst) 5575 v.AuxInt = 0 5576 return true 5577 } 5578 // match: (SGTUconst [c] (MOVBUreg _)) 5579 // cond: 0xff < uint32(c) 5580 // result: (MOVWconst [1]) 5581 for { 5582 c := v.AuxInt 5583 v_0 := v.Args[0] 5584 if v_0.Op != OpMIPSMOVBUreg { 5585 break 5586 } 5587 if !(0xff < uint32(c)) { 5588 break 5589 } 5590 v.reset(OpMIPSMOVWconst) 5591 v.AuxInt = 1 5592 return true 5593 } 5594 // match: (SGTUconst [c] (MOVHUreg _)) 5595 // cond: 0xffff < uint32(c) 5596 // result: (MOVWconst [1]) 5597 for { 5598 c := v.AuxInt 5599 v_0 := v.Args[0] 5600 if v_0.Op != OpMIPSMOVHUreg { 5601 break 5602 } 5603 if !(0xffff < uint32(c)) { 5604 break 5605 } 5606 v.reset(OpMIPSMOVWconst) 5607 v.AuxInt = 1 5608 return true 5609 } 5610 // match: (SGTUconst [c] (ANDconst [m] _)) 5611 // 
cond: uint32(m) < uint32(c) 5612 // result: (MOVWconst [1]) 5613 for { 5614 c := v.AuxInt 5615 v_0 := v.Args[0] 5616 if v_0.Op != OpMIPSANDconst { 5617 break 5618 } 5619 m := v_0.AuxInt 5620 if !(uint32(m) < uint32(c)) { 5621 break 5622 } 5623 v.reset(OpMIPSMOVWconst) 5624 v.AuxInt = 1 5625 return true 5626 } 5627 // match: (SGTUconst [c] (SRLconst _ [d])) 5628 // cond: uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c) 5629 // result: (MOVWconst [1]) 5630 for { 5631 c := v.AuxInt 5632 v_0 := v.Args[0] 5633 if v_0.Op != OpMIPSSRLconst { 5634 break 5635 } 5636 d := v_0.AuxInt 5637 if !(uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) { 5638 break 5639 } 5640 v.reset(OpMIPSMOVWconst) 5641 v.AuxInt = 1 5642 return true 5643 } 5644 return false 5645 } 5646 func rewriteValueMIPS_OpMIPSSGTUzero_0(v *Value) bool { 5647 // match: (SGTUzero (MOVWconst [d])) 5648 // cond: uint32(d) != 0 5649 // result: (MOVWconst [1]) 5650 for { 5651 v_0 := v.Args[0] 5652 if v_0.Op != OpMIPSMOVWconst { 5653 break 5654 } 5655 d := v_0.AuxInt 5656 if !(uint32(d) != 0) { 5657 break 5658 } 5659 v.reset(OpMIPSMOVWconst) 5660 v.AuxInt = 1 5661 return true 5662 } 5663 // match: (SGTUzero (MOVWconst [d])) 5664 // cond: uint32(d) == 0 5665 // result: (MOVWconst [0]) 5666 for { 5667 v_0 := v.Args[0] 5668 if v_0.Op != OpMIPSMOVWconst { 5669 break 5670 } 5671 d := v_0.AuxInt 5672 if !(uint32(d) == 0) { 5673 break 5674 } 5675 v.reset(OpMIPSMOVWconst) 5676 v.AuxInt = 0 5677 return true 5678 } 5679 return false 5680 } 5681 func rewriteValueMIPS_OpMIPSSGTconst_0(v *Value) bool { 5682 // match: (SGTconst [c] (MOVWconst [d])) 5683 // cond: int32(c) > int32(d) 5684 // result: (MOVWconst [1]) 5685 for { 5686 c := v.AuxInt 5687 v_0 := v.Args[0] 5688 if v_0.Op != OpMIPSMOVWconst { 5689 break 5690 } 5691 d := v_0.AuxInt 5692 if !(int32(c) > int32(d)) { 5693 break 5694 } 5695 v.reset(OpMIPSMOVWconst) 5696 v.AuxInt = 1 5697 return true 5698 } 5699 // match: (SGTconst [c] (MOVWconst [d])) 5700 // cond: 
int32(c) <= int32(d) 5701 // result: (MOVWconst [0]) 5702 for { 5703 c := v.AuxInt 5704 v_0 := v.Args[0] 5705 if v_0.Op != OpMIPSMOVWconst { 5706 break 5707 } 5708 d := v_0.AuxInt 5709 if !(int32(c) <= int32(d)) { 5710 break 5711 } 5712 v.reset(OpMIPSMOVWconst) 5713 v.AuxInt = 0 5714 return true 5715 } 5716 // match: (SGTconst [c] (MOVBreg _)) 5717 // cond: 0x7f < int32(c) 5718 // result: (MOVWconst [1]) 5719 for { 5720 c := v.AuxInt 5721 v_0 := v.Args[0] 5722 if v_0.Op != OpMIPSMOVBreg { 5723 break 5724 } 5725 if !(0x7f < int32(c)) { 5726 break 5727 } 5728 v.reset(OpMIPSMOVWconst) 5729 v.AuxInt = 1 5730 return true 5731 } 5732 // match: (SGTconst [c] (MOVBreg _)) 5733 // cond: int32(c) <= -0x80 5734 // result: (MOVWconst [0]) 5735 for { 5736 c := v.AuxInt 5737 v_0 := v.Args[0] 5738 if v_0.Op != OpMIPSMOVBreg { 5739 break 5740 } 5741 if !(int32(c) <= -0x80) { 5742 break 5743 } 5744 v.reset(OpMIPSMOVWconst) 5745 v.AuxInt = 0 5746 return true 5747 } 5748 // match: (SGTconst [c] (MOVBUreg _)) 5749 // cond: 0xff < int32(c) 5750 // result: (MOVWconst [1]) 5751 for { 5752 c := v.AuxInt 5753 v_0 := v.Args[0] 5754 if v_0.Op != OpMIPSMOVBUreg { 5755 break 5756 } 5757 if !(0xff < int32(c)) { 5758 break 5759 } 5760 v.reset(OpMIPSMOVWconst) 5761 v.AuxInt = 1 5762 return true 5763 } 5764 // match: (SGTconst [c] (MOVBUreg _)) 5765 // cond: int32(c) < 0 5766 // result: (MOVWconst [0]) 5767 for { 5768 c := v.AuxInt 5769 v_0 := v.Args[0] 5770 if v_0.Op != OpMIPSMOVBUreg { 5771 break 5772 } 5773 if !(int32(c) < 0) { 5774 break 5775 } 5776 v.reset(OpMIPSMOVWconst) 5777 v.AuxInt = 0 5778 return true 5779 } 5780 // match: (SGTconst [c] (MOVHreg _)) 5781 // cond: 0x7fff < int32(c) 5782 // result: (MOVWconst [1]) 5783 for { 5784 c := v.AuxInt 5785 v_0 := v.Args[0] 5786 if v_0.Op != OpMIPSMOVHreg { 5787 break 5788 } 5789 if !(0x7fff < int32(c)) { 5790 break 5791 } 5792 v.reset(OpMIPSMOVWconst) 5793 v.AuxInt = 1 5794 return true 5795 } 5796 // match: (SGTconst [c] (MOVHreg _)) 5797 // 
cond: int32(c) <= -0x8000 5798 // result: (MOVWconst [0]) 5799 for { 5800 c := v.AuxInt 5801 v_0 := v.Args[0] 5802 if v_0.Op != OpMIPSMOVHreg { 5803 break 5804 } 5805 if !(int32(c) <= -0x8000) { 5806 break 5807 } 5808 v.reset(OpMIPSMOVWconst) 5809 v.AuxInt = 0 5810 return true 5811 } 5812 // match: (SGTconst [c] (MOVHUreg _)) 5813 // cond: 0xffff < int32(c) 5814 // result: (MOVWconst [1]) 5815 for { 5816 c := v.AuxInt 5817 v_0 := v.Args[0] 5818 if v_0.Op != OpMIPSMOVHUreg { 5819 break 5820 } 5821 if !(0xffff < int32(c)) { 5822 break 5823 } 5824 v.reset(OpMIPSMOVWconst) 5825 v.AuxInt = 1 5826 return true 5827 } 5828 // match: (SGTconst [c] (MOVHUreg _)) 5829 // cond: int32(c) < 0 5830 // result: (MOVWconst [0]) 5831 for { 5832 c := v.AuxInt 5833 v_0 := v.Args[0] 5834 if v_0.Op != OpMIPSMOVHUreg { 5835 break 5836 } 5837 if !(int32(c) < 0) { 5838 break 5839 } 5840 v.reset(OpMIPSMOVWconst) 5841 v.AuxInt = 0 5842 return true 5843 } 5844 return false 5845 } 5846 func rewriteValueMIPS_OpMIPSSGTconst_10(v *Value) bool { 5847 // match: (SGTconst [c] (ANDconst [m] _)) 5848 // cond: 0 <= int32(m) && int32(m) < int32(c) 5849 // result: (MOVWconst [1]) 5850 for { 5851 c := v.AuxInt 5852 v_0 := v.Args[0] 5853 if v_0.Op != OpMIPSANDconst { 5854 break 5855 } 5856 m := v_0.AuxInt 5857 if !(0 <= int32(m) && int32(m) < int32(c)) { 5858 break 5859 } 5860 v.reset(OpMIPSMOVWconst) 5861 v.AuxInt = 1 5862 return true 5863 } 5864 // match: (SGTconst [c] (SRLconst _ [d])) 5865 // cond: 0 <= int32(c) && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c) 5866 // result: (MOVWconst [1]) 5867 for { 5868 c := v.AuxInt 5869 v_0 := v.Args[0] 5870 if v_0.Op != OpMIPSSRLconst { 5871 break 5872 } 5873 d := v_0.AuxInt 5874 if !(0 <= int32(c) && uint32(d) <= 31 && 0xffffffff>>uint32(d) < uint32(c)) { 5875 break 5876 } 5877 v.reset(OpMIPSMOVWconst) 5878 v.AuxInt = 1 5879 return true 5880 } 5881 return false 5882 } 5883 func rewriteValueMIPS_OpMIPSSGTzero_0(v *Value) bool { 5884 // match: (SGTzero 
(MOVWconst [d])) 5885 // cond: int32(d) > 0 5886 // result: (MOVWconst [1]) 5887 for { 5888 v_0 := v.Args[0] 5889 if v_0.Op != OpMIPSMOVWconst { 5890 break 5891 } 5892 d := v_0.AuxInt 5893 if !(int32(d) > 0) { 5894 break 5895 } 5896 v.reset(OpMIPSMOVWconst) 5897 v.AuxInt = 1 5898 return true 5899 } 5900 // match: (SGTzero (MOVWconst [d])) 5901 // cond: int32(d) <= 0 5902 // result: (MOVWconst [0]) 5903 for { 5904 v_0 := v.Args[0] 5905 if v_0.Op != OpMIPSMOVWconst { 5906 break 5907 } 5908 d := v_0.AuxInt 5909 if !(int32(d) <= 0) { 5910 break 5911 } 5912 v.reset(OpMIPSMOVWconst) 5913 v.AuxInt = 0 5914 return true 5915 } 5916 return false 5917 } 5918 func rewriteValueMIPS_OpMIPSSLL_0(v *Value) bool { 5919 // match: (SLL _ (MOVWconst [c])) 5920 // cond: uint32(c)>=32 5921 // result: (MOVWconst [0]) 5922 for { 5923 _ = v.Args[1] 5924 v_1 := v.Args[1] 5925 if v_1.Op != OpMIPSMOVWconst { 5926 break 5927 } 5928 c := v_1.AuxInt 5929 if !(uint32(c) >= 32) { 5930 break 5931 } 5932 v.reset(OpMIPSMOVWconst) 5933 v.AuxInt = 0 5934 return true 5935 } 5936 // match: (SLL x (MOVWconst [c])) 5937 // cond: 5938 // result: (SLLconst x [c]) 5939 for { 5940 _ = v.Args[1] 5941 x := v.Args[0] 5942 v_1 := v.Args[1] 5943 if v_1.Op != OpMIPSMOVWconst { 5944 break 5945 } 5946 c := v_1.AuxInt 5947 v.reset(OpMIPSSLLconst) 5948 v.AuxInt = c 5949 v.AddArg(x) 5950 return true 5951 } 5952 return false 5953 } 5954 func rewriteValueMIPS_OpMIPSSLLconst_0(v *Value) bool { 5955 // match: (SLLconst [c] (MOVWconst [d])) 5956 // cond: 5957 // result: (MOVWconst [int64(int32(uint32(d)<<uint32(c)))]) 5958 for { 5959 c := v.AuxInt 5960 v_0 := v.Args[0] 5961 if v_0.Op != OpMIPSMOVWconst { 5962 break 5963 } 5964 d := v_0.AuxInt 5965 v.reset(OpMIPSMOVWconst) 5966 v.AuxInt = int64(int32(uint32(d) << uint32(c))) 5967 return true 5968 } 5969 return false 5970 } 5971 func rewriteValueMIPS_OpMIPSSRA_0(v *Value) bool { 5972 // match: (SRA x (MOVWconst [c])) 5973 // cond: uint32(c)>=32 5974 // result: (SRAconst x [31]) 
5975 for { 5976 _ = v.Args[1] 5977 x := v.Args[0] 5978 v_1 := v.Args[1] 5979 if v_1.Op != OpMIPSMOVWconst { 5980 break 5981 } 5982 c := v_1.AuxInt 5983 if !(uint32(c) >= 32) { 5984 break 5985 } 5986 v.reset(OpMIPSSRAconst) 5987 v.AuxInt = 31 5988 v.AddArg(x) 5989 return true 5990 } 5991 // match: (SRA x (MOVWconst [c])) 5992 // cond: 5993 // result: (SRAconst x [c]) 5994 for { 5995 _ = v.Args[1] 5996 x := v.Args[0] 5997 v_1 := v.Args[1] 5998 if v_1.Op != OpMIPSMOVWconst { 5999 break 6000 } 6001 c := v_1.AuxInt 6002 v.reset(OpMIPSSRAconst) 6003 v.AuxInt = c 6004 v.AddArg(x) 6005 return true 6006 } 6007 return false 6008 } 6009 func rewriteValueMIPS_OpMIPSSRAconst_0(v *Value) bool { 6010 // match: (SRAconst [c] (MOVWconst [d])) 6011 // cond: 6012 // result: (MOVWconst [int64(int32(d)>>uint32(c))]) 6013 for { 6014 c := v.AuxInt 6015 v_0 := v.Args[0] 6016 if v_0.Op != OpMIPSMOVWconst { 6017 break 6018 } 6019 d := v_0.AuxInt 6020 v.reset(OpMIPSMOVWconst) 6021 v.AuxInt = int64(int32(d) >> uint32(c)) 6022 return true 6023 } 6024 return false 6025 } 6026 func rewriteValueMIPS_OpMIPSSRL_0(v *Value) bool { 6027 // match: (SRL _ (MOVWconst [c])) 6028 // cond: uint32(c)>=32 6029 // result: (MOVWconst [0]) 6030 for { 6031 _ = v.Args[1] 6032 v_1 := v.Args[1] 6033 if v_1.Op != OpMIPSMOVWconst { 6034 break 6035 } 6036 c := v_1.AuxInt 6037 if !(uint32(c) >= 32) { 6038 break 6039 } 6040 v.reset(OpMIPSMOVWconst) 6041 v.AuxInt = 0 6042 return true 6043 } 6044 // match: (SRL x (MOVWconst [c])) 6045 // cond: 6046 // result: (SRLconst x [c]) 6047 for { 6048 _ = v.Args[1] 6049 x := v.Args[0] 6050 v_1 := v.Args[1] 6051 if v_1.Op != OpMIPSMOVWconst { 6052 break 6053 } 6054 c := v_1.AuxInt 6055 v.reset(OpMIPSSRLconst) 6056 v.AuxInt = c 6057 v.AddArg(x) 6058 return true 6059 } 6060 return false 6061 } 6062 func rewriteValueMIPS_OpMIPSSRLconst_0(v *Value) bool { 6063 // match: (SRLconst [c] (MOVWconst [d])) 6064 // cond: 6065 // result: (MOVWconst [int64(uint32(d)>>uint32(c))]) 6066 for { 6067 
c := v.AuxInt 6068 v_0 := v.Args[0] 6069 if v_0.Op != OpMIPSMOVWconst { 6070 break 6071 } 6072 d := v_0.AuxInt 6073 v.reset(OpMIPSMOVWconst) 6074 v.AuxInt = int64(uint32(d) >> uint32(c)) 6075 return true 6076 } 6077 return false 6078 } 6079 func rewriteValueMIPS_OpMIPSSUB_0(v *Value) bool { 6080 // match: (SUB x (MOVWconst [c])) 6081 // cond: 6082 // result: (SUBconst [c] x) 6083 for { 6084 _ = v.Args[1] 6085 x := v.Args[0] 6086 v_1 := v.Args[1] 6087 if v_1.Op != OpMIPSMOVWconst { 6088 break 6089 } 6090 c := v_1.AuxInt 6091 v.reset(OpMIPSSUBconst) 6092 v.AuxInt = c 6093 v.AddArg(x) 6094 return true 6095 } 6096 // match: (SUB x x) 6097 // cond: 6098 // result: (MOVWconst [0]) 6099 for { 6100 _ = v.Args[1] 6101 x := v.Args[0] 6102 if x != v.Args[1] { 6103 break 6104 } 6105 v.reset(OpMIPSMOVWconst) 6106 v.AuxInt = 0 6107 return true 6108 } 6109 // match: (SUB (MOVWconst [0]) x) 6110 // cond: 6111 // result: (NEG x) 6112 for { 6113 _ = v.Args[1] 6114 v_0 := v.Args[0] 6115 if v_0.Op != OpMIPSMOVWconst { 6116 break 6117 } 6118 if v_0.AuxInt != 0 { 6119 break 6120 } 6121 x := v.Args[1] 6122 v.reset(OpMIPSNEG) 6123 v.AddArg(x) 6124 return true 6125 } 6126 return false 6127 } 6128 func rewriteValueMIPS_OpMIPSSUBconst_0(v *Value) bool { 6129 // match: (SUBconst [0] x) 6130 // cond: 6131 // result: x 6132 for { 6133 if v.AuxInt != 0 { 6134 break 6135 } 6136 x := v.Args[0] 6137 v.reset(OpCopy) 6138 v.Type = x.Type 6139 v.AddArg(x) 6140 return true 6141 } 6142 // match: (SUBconst [c] (MOVWconst [d])) 6143 // cond: 6144 // result: (MOVWconst [int64(int32(d-c))]) 6145 for { 6146 c := v.AuxInt 6147 v_0 := v.Args[0] 6148 if v_0.Op != OpMIPSMOVWconst { 6149 break 6150 } 6151 d := v_0.AuxInt 6152 v.reset(OpMIPSMOVWconst) 6153 v.AuxInt = int64(int32(d - c)) 6154 return true 6155 } 6156 // match: (SUBconst [c] (SUBconst [d] x)) 6157 // cond: 6158 // result: (ADDconst [int64(int32(-c-d))] x) 6159 for { 6160 c := v.AuxInt 6161 v_0 := v.Args[0] 6162 if v_0.Op != OpMIPSSUBconst { 6163 
break 6164 } 6165 d := v_0.AuxInt 6166 x := v_0.Args[0] 6167 v.reset(OpMIPSADDconst) 6168 v.AuxInt = int64(int32(-c - d)) 6169 v.AddArg(x) 6170 return true 6171 } 6172 // match: (SUBconst [c] (ADDconst [d] x)) 6173 // cond: 6174 // result: (ADDconst [int64(int32(-c+d))] x) 6175 for { 6176 c := v.AuxInt 6177 v_0 := v.Args[0] 6178 if v_0.Op != OpMIPSADDconst { 6179 break 6180 } 6181 d := v_0.AuxInt 6182 x := v_0.Args[0] 6183 v.reset(OpMIPSADDconst) 6184 v.AuxInt = int64(int32(-c + d)) 6185 v.AddArg(x) 6186 return true 6187 } 6188 return false 6189 } 6190 func rewriteValueMIPS_OpMIPSXOR_0(v *Value) bool { 6191 // match: (XOR x (MOVWconst [c])) 6192 // cond: 6193 // result: (XORconst [c] x) 6194 for { 6195 _ = v.Args[1] 6196 x := v.Args[0] 6197 v_1 := v.Args[1] 6198 if v_1.Op != OpMIPSMOVWconst { 6199 break 6200 } 6201 c := v_1.AuxInt 6202 v.reset(OpMIPSXORconst) 6203 v.AuxInt = c 6204 v.AddArg(x) 6205 return true 6206 } 6207 // match: (XOR (MOVWconst [c]) x) 6208 // cond: 6209 // result: (XORconst [c] x) 6210 for { 6211 _ = v.Args[1] 6212 v_0 := v.Args[0] 6213 if v_0.Op != OpMIPSMOVWconst { 6214 break 6215 } 6216 c := v_0.AuxInt 6217 x := v.Args[1] 6218 v.reset(OpMIPSXORconst) 6219 v.AuxInt = c 6220 v.AddArg(x) 6221 return true 6222 } 6223 // match: (XOR x x) 6224 // cond: 6225 // result: (MOVWconst [0]) 6226 for { 6227 _ = v.Args[1] 6228 x := v.Args[0] 6229 if x != v.Args[1] { 6230 break 6231 } 6232 v.reset(OpMIPSMOVWconst) 6233 v.AuxInt = 0 6234 return true 6235 } 6236 return false 6237 } 6238 func rewriteValueMIPS_OpMIPSXORconst_0(v *Value) bool { 6239 // match: (XORconst [0] x) 6240 // cond: 6241 // result: x 6242 for { 6243 if v.AuxInt != 0 { 6244 break 6245 } 6246 x := v.Args[0] 6247 v.reset(OpCopy) 6248 v.Type = x.Type 6249 v.AddArg(x) 6250 return true 6251 } 6252 // match: (XORconst [-1] x) 6253 // cond: 6254 // result: (NORconst [0] x) 6255 for { 6256 if v.AuxInt != -1 { 6257 break 6258 } 6259 x := v.Args[0] 6260 v.reset(OpMIPSNORconst) 6261 v.AuxInt = 0 6262 
v.AddArg(x) 6263 return true 6264 } 6265 // match: (XORconst [c] (MOVWconst [d])) 6266 // cond: 6267 // result: (MOVWconst [c^d]) 6268 for { 6269 c := v.AuxInt 6270 v_0 := v.Args[0] 6271 if v_0.Op != OpMIPSMOVWconst { 6272 break 6273 } 6274 d := v_0.AuxInt 6275 v.reset(OpMIPSMOVWconst) 6276 v.AuxInt = c ^ d 6277 return true 6278 } 6279 // match: (XORconst [c] (XORconst [d] x)) 6280 // cond: 6281 // result: (XORconst [c^d] x) 6282 for { 6283 c := v.AuxInt 6284 v_0 := v.Args[0] 6285 if v_0.Op != OpMIPSXORconst { 6286 break 6287 } 6288 d := v_0.AuxInt 6289 x := v_0.Args[0] 6290 v.reset(OpMIPSXORconst) 6291 v.AuxInt = c ^ d 6292 v.AddArg(x) 6293 return true 6294 } 6295 return false 6296 } 6297 func rewriteValueMIPS_OpMod16_0(v *Value) bool { 6298 b := v.Block 6299 _ = b 6300 typ := &b.Func.Config.Types 6301 _ = typ 6302 // match: (Mod16 x y) 6303 // cond: 6304 // result: (Select0 (DIV (SignExt16to32 x) (SignExt16to32 y))) 6305 for { 6306 _ = v.Args[1] 6307 x := v.Args[0] 6308 y := v.Args[1] 6309 v.reset(OpSelect0) 6310 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 6311 v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6312 v1.AddArg(x) 6313 v0.AddArg(v1) 6314 v2 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 6315 v2.AddArg(y) 6316 v0.AddArg(v2) 6317 v.AddArg(v0) 6318 return true 6319 } 6320 } 6321 func rewriteValueMIPS_OpMod16u_0(v *Value) bool { 6322 b := v.Block 6323 _ = b 6324 typ := &b.Func.Config.Types 6325 _ = typ 6326 // match: (Mod16u x y) 6327 // cond: 6328 // result: (Select0 (DIVU (ZeroExt16to32 x) (ZeroExt16to32 y))) 6329 for { 6330 _ = v.Args[1] 6331 x := v.Args[0] 6332 y := v.Args[1] 6333 v.reset(OpSelect0) 6334 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 6335 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6336 v1.AddArg(x) 6337 v0.AddArg(v1) 6338 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 6339 v2.AddArg(y) 6340 v0.AddArg(v2) 6341 v.AddArg(v0) 6342 return true 6343 } 6344 
} 6345 func rewriteValueMIPS_OpMod32_0(v *Value) bool { 6346 b := v.Block 6347 _ = b 6348 typ := &b.Func.Config.Types 6349 _ = typ 6350 // match: (Mod32 x y) 6351 // cond: 6352 // result: (Select0 (DIV x y)) 6353 for { 6354 _ = v.Args[1] 6355 x := v.Args[0] 6356 y := v.Args[1] 6357 v.reset(OpSelect0) 6358 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 6359 v0.AddArg(x) 6360 v0.AddArg(y) 6361 v.AddArg(v0) 6362 return true 6363 } 6364 } 6365 func rewriteValueMIPS_OpMod32u_0(v *Value) bool { 6366 b := v.Block 6367 _ = b 6368 typ := &b.Func.Config.Types 6369 _ = typ 6370 // match: (Mod32u x y) 6371 // cond: 6372 // result: (Select0 (DIVU x y)) 6373 for { 6374 _ = v.Args[1] 6375 x := v.Args[0] 6376 y := v.Args[1] 6377 v.reset(OpSelect0) 6378 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 6379 v0.AddArg(x) 6380 v0.AddArg(y) 6381 v.AddArg(v0) 6382 return true 6383 } 6384 } 6385 func rewriteValueMIPS_OpMod8_0(v *Value) bool { 6386 b := v.Block 6387 _ = b 6388 typ := &b.Func.Config.Types 6389 _ = typ 6390 // match: (Mod8 x y) 6391 // cond: 6392 // result: (Select0 (DIV (SignExt8to32 x) (SignExt8to32 y))) 6393 for { 6394 _ = v.Args[1] 6395 x := v.Args[0] 6396 y := v.Args[1] 6397 v.reset(OpSelect0) 6398 v0 := b.NewValue0(v.Pos, OpMIPSDIV, types.NewTuple(typ.Int32, typ.Int32)) 6399 v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 6400 v1.AddArg(x) 6401 v0.AddArg(v1) 6402 v2 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32) 6403 v2.AddArg(y) 6404 v0.AddArg(v2) 6405 v.AddArg(v0) 6406 return true 6407 } 6408 } 6409 func rewriteValueMIPS_OpMod8u_0(v *Value) bool { 6410 b := v.Block 6411 _ = b 6412 typ := &b.Func.Config.Types 6413 _ = typ 6414 // match: (Mod8u x y) 6415 // cond: 6416 // result: (Select0 (DIVU (ZeroExt8to32 x) (ZeroExt8to32 y))) 6417 for { 6418 _ = v.Args[1] 6419 x := v.Args[0] 6420 y := v.Args[1] 6421 v.reset(OpSelect0) 6422 v0 := b.NewValue0(v.Pos, OpMIPSDIVU, types.NewTuple(typ.UInt32, typ.UInt32)) 6423 
v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6424 v1.AddArg(x) 6425 v0.AddArg(v1) 6426 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 6427 v2.AddArg(y) 6428 v0.AddArg(v2) 6429 v.AddArg(v0) 6430 return true 6431 } 6432 } 6433 func rewriteValueMIPS_OpMove_0(v *Value) bool { 6434 b := v.Block 6435 _ = b 6436 typ := &b.Func.Config.Types 6437 _ = typ 6438 // match: (Move [0] _ _ mem) 6439 // cond: 6440 // result: mem 6441 for { 6442 if v.AuxInt != 0 { 6443 break 6444 } 6445 _ = v.Args[2] 6446 mem := v.Args[2] 6447 v.reset(OpCopy) 6448 v.Type = mem.Type 6449 v.AddArg(mem) 6450 return true 6451 } 6452 // match: (Move [1] dst src mem) 6453 // cond: 6454 // result: (MOVBstore dst (MOVBUload src mem) mem) 6455 for { 6456 if v.AuxInt != 1 { 6457 break 6458 } 6459 _ = v.Args[2] 6460 dst := v.Args[0] 6461 src := v.Args[1] 6462 mem := v.Args[2] 6463 v.reset(OpMIPSMOVBstore) 6464 v.AddArg(dst) 6465 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6466 v0.AddArg(src) 6467 v0.AddArg(mem) 6468 v.AddArg(v0) 6469 v.AddArg(mem) 6470 return true 6471 } 6472 // match: (Move [2] {t} dst src mem) 6473 // cond: t.(*types.Type).Alignment()%2 == 0 6474 // result: (MOVHstore dst (MOVHUload src mem) mem) 6475 for { 6476 if v.AuxInt != 2 { 6477 break 6478 } 6479 t := v.Aux 6480 _ = v.Args[2] 6481 dst := v.Args[0] 6482 src := v.Args[1] 6483 mem := v.Args[2] 6484 if !(t.(*types.Type).Alignment()%2 == 0) { 6485 break 6486 } 6487 v.reset(OpMIPSMOVHstore) 6488 v.AddArg(dst) 6489 v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 6490 v0.AddArg(src) 6491 v0.AddArg(mem) 6492 v.AddArg(v0) 6493 v.AddArg(mem) 6494 return true 6495 } 6496 // match: (Move [2] dst src mem) 6497 // cond: 6498 // result: (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)) 6499 for { 6500 if v.AuxInt != 2 { 6501 break 6502 } 6503 _ = v.Args[2] 6504 dst := v.Args[0] 6505 src := v.Args[1] 6506 mem := v.Args[2] 6507 v.reset(OpMIPSMOVBstore) 6508 v.AuxInt = 1 6509 v.AddArg(dst) 
6510 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6511 v0.AuxInt = 1 6512 v0.AddArg(src) 6513 v0.AddArg(mem) 6514 v.AddArg(v0) 6515 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6516 v1.AddArg(dst) 6517 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6518 v2.AddArg(src) 6519 v2.AddArg(mem) 6520 v1.AddArg(v2) 6521 v1.AddArg(mem) 6522 v.AddArg(v1) 6523 return true 6524 } 6525 // match: (Move [4] {t} dst src mem) 6526 // cond: t.(*types.Type).Alignment()%4 == 0 6527 // result: (MOVWstore dst (MOVWload src mem) mem) 6528 for { 6529 if v.AuxInt != 4 { 6530 break 6531 } 6532 t := v.Aux 6533 _ = v.Args[2] 6534 dst := v.Args[0] 6535 src := v.Args[1] 6536 mem := v.Args[2] 6537 if !(t.(*types.Type).Alignment()%4 == 0) { 6538 break 6539 } 6540 v.reset(OpMIPSMOVWstore) 6541 v.AddArg(dst) 6542 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6543 v0.AddArg(src) 6544 v0.AddArg(mem) 6545 v.AddArg(v0) 6546 v.AddArg(mem) 6547 return true 6548 } 6549 // match: (Move [4] {t} dst src mem) 6550 // cond: t.(*types.Type).Alignment()%2 == 0 6551 // result: (MOVHstore [2] dst (MOVHUload [2] src mem) (MOVHstore dst (MOVHUload src mem) mem)) 6552 for { 6553 if v.AuxInt != 4 { 6554 break 6555 } 6556 t := v.Aux 6557 _ = v.Args[2] 6558 dst := v.Args[0] 6559 src := v.Args[1] 6560 mem := v.Args[2] 6561 if !(t.(*types.Type).Alignment()%2 == 0) { 6562 break 6563 } 6564 v.reset(OpMIPSMOVHstore) 6565 v.AuxInt = 2 6566 v.AddArg(dst) 6567 v0 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 6568 v0.AuxInt = 2 6569 v0.AddArg(src) 6570 v0.AddArg(mem) 6571 v.AddArg(v0) 6572 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6573 v1.AddArg(dst) 6574 v2 := b.NewValue0(v.Pos, OpMIPSMOVHUload, typ.UInt16) 6575 v2.AddArg(src) 6576 v2.AddArg(mem) 6577 v1.AddArg(v2) 6578 v1.AddArg(mem) 6579 v.AddArg(v1) 6580 return true 6581 } 6582 // match: (Move [4] dst src mem) 6583 // cond: 6584 // result: (MOVBstore [3] dst (MOVBUload [3] src mem) (MOVBstore [2] dst (MOVBUload [2] src mem) 
(MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem)))) 6585 for { 6586 if v.AuxInt != 4 { 6587 break 6588 } 6589 _ = v.Args[2] 6590 dst := v.Args[0] 6591 src := v.Args[1] 6592 mem := v.Args[2] 6593 v.reset(OpMIPSMOVBstore) 6594 v.AuxInt = 3 6595 v.AddArg(dst) 6596 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6597 v0.AuxInt = 3 6598 v0.AddArg(src) 6599 v0.AddArg(mem) 6600 v.AddArg(v0) 6601 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6602 v1.AuxInt = 2 6603 v1.AddArg(dst) 6604 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6605 v2.AuxInt = 2 6606 v2.AddArg(src) 6607 v2.AddArg(mem) 6608 v1.AddArg(v2) 6609 v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6610 v3.AuxInt = 1 6611 v3.AddArg(dst) 6612 v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6613 v4.AuxInt = 1 6614 v4.AddArg(src) 6615 v4.AddArg(mem) 6616 v3.AddArg(v4) 6617 v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6618 v5.AddArg(dst) 6619 v6 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6620 v6.AddArg(src) 6621 v6.AddArg(mem) 6622 v5.AddArg(v6) 6623 v5.AddArg(mem) 6624 v3.AddArg(v5) 6625 v1.AddArg(v3) 6626 v.AddArg(v1) 6627 return true 6628 } 6629 // match: (Move [3] dst src mem) 6630 // cond: 6631 // result: (MOVBstore [2] dst (MOVBUload [2] src mem) (MOVBstore [1] dst (MOVBUload [1] src mem) (MOVBstore dst (MOVBUload src mem) mem))) 6632 for { 6633 if v.AuxInt != 3 { 6634 break 6635 } 6636 _ = v.Args[2] 6637 dst := v.Args[0] 6638 src := v.Args[1] 6639 mem := v.Args[2] 6640 v.reset(OpMIPSMOVBstore) 6641 v.AuxInt = 2 6642 v.AddArg(dst) 6643 v0 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6644 v0.AuxInt = 2 6645 v0.AddArg(src) 6646 v0.AddArg(mem) 6647 v.AddArg(v0) 6648 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6649 v1.AuxInt = 1 6650 v1.AddArg(dst) 6651 v2 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6652 v2.AuxInt = 1 6653 v2.AddArg(src) 6654 v2.AddArg(mem) 6655 v1.AddArg(v2) 6656 v3 := 
b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 6657 v3.AddArg(dst) 6658 v4 := b.NewValue0(v.Pos, OpMIPSMOVBUload, typ.UInt8) 6659 v4.AddArg(src) 6660 v4.AddArg(mem) 6661 v3.AddArg(v4) 6662 v3.AddArg(mem) 6663 v1.AddArg(v3) 6664 v.AddArg(v1) 6665 return true 6666 } 6667 // match: (Move [8] {t} dst src mem) 6668 // cond: t.(*types.Type).Alignment()%4 == 0 6669 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 6670 for { 6671 if v.AuxInt != 8 { 6672 break 6673 } 6674 t := v.Aux 6675 _ = v.Args[2] 6676 dst := v.Args[0] 6677 src := v.Args[1] 6678 mem := v.Args[2] 6679 if !(t.(*types.Type).Alignment()%4 == 0) { 6680 break 6681 } 6682 v.reset(OpMIPSMOVWstore) 6683 v.AuxInt = 4 6684 v.AddArg(dst) 6685 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6686 v0.AuxInt = 4 6687 v0.AddArg(src) 6688 v0.AddArg(mem) 6689 v.AddArg(v0) 6690 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6691 v1.AddArg(dst) 6692 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6693 v2.AddArg(src) 6694 v2.AddArg(mem) 6695 v1.AddArg(v2) 6696 v1.AddArg(mem) 6697 v.AddArg(v1) 6698 return true 6699 } 6700 // match: (Move [8] {t} dst src mem) 6701 // cond: t.(*types.Type).Alignment()%2 == 0 6702 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 6703 for { 6704 if v.AuxInt != 8 { 6705 break 6706 } 6707 t := v.Aux 6708 _ = v.Args[2] 6709 dst := v.Args[0] 6710 src := v.Args[1] 6711 mem := v.Args[2] 6712 if !(t.(*types.Type).Alignment()%2 == 0) { 6713 break 6714 } 6715 v.reset(OpMIPSMOVHstore) 6716 v.AuxInt = 6 6717 v.AddArg(dst) 6718 v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6719 v0.AuxInt = 6 6720 v0.AddArg(src) 6721 v0.AddArg(mem) 6722 v.AddArg(v0) 6723 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6724 v1.AuxInt = 4 6725 v1.AddArg(dst) 6726 v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6727 
v2.AuxInt = 4 6728 v2.AddArg(src) 6729 v2.AddArg(mem) 6730 v1.AddArg(v2) 6731 v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6732 v3.AuxInt = 2 6733 v3.AddArg(dst) 6734 v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6735 v4.AuxInt = 2 6736 v4.AddArg(src) 6737 v4.AddArg(mem) 6738 v3.AddArg(v4) 6739 v5 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6740 v5.AddArg(dst) 6741 v6 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6742 v6.AddArg(src) 6743 v6.AddArg(mem) 6744 v5.AddArg(v6) 6745 v5.AddArg(mem) 6746 v3.AddArg(v5) 6747 v1.AddArg(v3) 6748 v.AddArg(v1) 6749 return true 6750 } 6751 return false 6752 } 6753 func rewriteValueMIPS_OpMove_10(v *Value) bool { 6754 b := v.Block 6755 _ = b 6756 config := b.Func.Config 6757 _ = config 6758 typ := &b.Func.Config.Types 6759 _ = typ 6760 // match: (Move [6] {t} dst src mem) 6761 // cond: t.(*types.Type).Alignment()%2 == 0 6762 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 6763 for { 6764 if v.AuxInt != 6 { 6765 break 6766 } 6767 t := v.Aux 6768 _ = v.Args[2] 6769 dst := v.Args[0] 6770 src := v.Args[1] 6771 mem := v.Args[2] 6772 if !(t.(*types.Type).Alignment()%2 == 0) { 6773 break 6774 } 6775 v.reset(OpMIPSMOVHstore) 6776 v.AuxInt = 4 6777 v.AddArg(dst) 6778 v0 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6779 v0.AuxInt = 4 6780 v0.AddArg(src) 6781 v0.AddArg(mem) 6782 v.AddArg(v0) 6783 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6784 v1.AuxInt = 2 6785 v1.AddArg(dst) 6786 v2 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6787 v2.AuxInt = 2 6788 v2.AddArg(src) 6789 v2.AddArg(mem) 6790 v1.AddArg(v2) 6791 v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 6792 v3.AddArg(dst) 6793 v4 := b.NewValue0(v.Pos, OpMIPSMOVHload, typ.Int16) 6794 v4.AddArg(src) 6795 v4.AddArg(mem) 6796 v3.AddArg(v4) 6797 v3.AddArg(mem) 6798 v1.AddArg(v3) 6799 v.AddArg(v1) 6800 return true 6801 } 6802 // match: (Move [12] 
{t} dst src mem) 6803 // cond: t.(*types.Type).Alignment()%4 == 0 6804 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) 6805 for { 6806 if v.AuxInt != 12 { 6807 break 6808 } 6809 t := v.Aux 6810 _ = v.Args[2] 6811 dst := v.Args[0] 6812 src := v.Args[1] 6813 mem := v.Args[2] 6814 if !(t.(*types.Type).Alignment()%4 == 0) { 6815 break 6816 } 6817 v.reset(OpMIPSMOVWstore) 6818 v.AuxInt = 8 6819 v.AddArg(dst) 6820 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6821 v0.AuxInt = 8 6822 v0.AddArg(src) 6823 v0.AddArg(mem) 6824 v.AddArg(v0) 6825 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6826 v1.AuxInt = 4 6827 v1.AddArg(dst) 6828 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6829 v2.AuxInt = 4 6830 v2.AddArg(src) 6831 v2.AddArg(mem) 6832 v1.AddArg(v2) 6833 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6834 v3.AddArg(dst) 6835 v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6836 v4.AddArg(src) 6837 v4.AddArg(mem) 6838 v3.AddArg(v4) 6839 v3.AddArg(mem) 6840 v1.AddArg(v3) 6841 v.AddArg(v1) 6842 return true 6843 } 6844 // match: (Move [16] {t} dst src mem) 6845 // cond: t.(*types.Type).Alignment()%4 == 0 6846 // result: (MOVWstore [12] dst (MOVWload [12] src mem) (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))) 6847 for { 6848 if v.AuxInt != 16 { 6849 break 6850 } 6851 t := v.Aux 6852 _ = v.Args[2] 6853 dst := v.Args[0] 6854 src := v.Args[1] 6855 mem := v.Args[2] 6856 if !(t.(*types.Type).Alignment()%4 == 0) { 6857 break 6858 } 6859 v.reset(OpMIPSMOVWstore) 6860 v.AuxInt = 12 6861 v.AddArg(dst) 6862 v0 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6863 v0.AuxInt = 12 6864 v0.AddArg(src) 6865 v0.AddArg(mem) 6866 v.AddArg(v0) 6867 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6868 v1.AuxInt = 8 6869 v1.AddArg(dst) 6870 v2 := b.NewValue0(v.Pos, OpMIPSMOVWload, 
typ.UInt32) 6871 v2.AuxInt = 8 6872 v2.AddArg(src) 6873 v2.AddArg(mem) 6874 v1.AddArg(v2) 6875 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6876 v3.AuxInt = 4 6877 v3.AddArg(dst) 6878 v4 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6879 v4.AuxInt = 4 6880 v4.AddArg(src) 6881 v4.AddArg(mem) 6882 v3.AddArg(v4) 6883 v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 6884 v5.AddArg(dst) 6885 v6 := b.NewValue0(v.Pos, OpMIPSMOVWload, typ.UInt32) 6886 v6.AddArg(src) 6887 v6.AddArg(mem) 6888 v5.AddArg(v6) 6889 v5.AddArg(mem) 6890 v3.AddArg(v5) 6891 v1.AddArg(v3) 6892 v.AddArg(v1) 6893 return true 6894 } 6895 // match: (Move [s] {t} dst src mem) 6896 // cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0) 6897 // result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 6898 for { 6899 s := v.AuxInt 6900 t := v.Aux 6901 _ = v.Args[2] 6902 dst := v.Args[0] 6903 src := v.Args[1] 6904 mem := v.Args[2] 6905 if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) { 6906 break 6907 } 6908 v.reset(OpMIPSLoweredMove) 6909 v.AuxInt = t.(*types.Type).Alignment() 6910 v.AddArg(dst) 6911 v.AddArg(src) 6912 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, src.Type) 6913 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 6914 v0.AddArg(src) 6915 v.AddArg(v0) 6916 v.AddArg(mem) 6917 return true 6918 } 6919 return false 6920 } 6921 func rewriteValueMIPS_OpMul16_0(v *Value) bool { 6922 // match: (Mul16 x y) 6923 // cond: 6924 // result: (MUL x y) 6925 for { 6926 _ = v.Args[1] 6927 x := v.Args[0] 6928 y := v.Args[1] 6929 v.reset(OpMIPSMUL) 6930 v.AddArg(x) 6931 v.AddArg(y) 6932 return true 6933 } 6934 } 6935 func rewriteValueMIPS_OpMul32_0(v *Value) bool { 6936 // match: (Mul32 x y) 6937 // cond: 6938 // result: (MUL x y) 6939 for { 6940 _ = v.Args[1] 6941 x := v.Args[0] 6942 y := v.Args[1] 6943 v.reset(OpMIPSMUL) 6944 v.AddArg(x) 6945 v.AddArg(y) 6946 return true 6947 } 6948 } 6949 func 
rewriteValueMIPS_OpMul32F_0(v *Value) bool { 6950 // match: (Mul32F x y) 6951 // cond: 6952 // result: (MULF x y) 6953 for { 6954 _ = v.Args[1] 6955 x := v.Args[0] 6956 y := v.Args[1] 6957 v.reset(OpMIPSMULF) 6958 v.AddArg(x) 6959 v.AddArg(y) 6960 return true 6961 } 6962 } 6963 func rewriteValueMIPS_OpMul32uhilo_0(v *Value) bool { 6964 // match: (Mul32uhilo x y) 6965 // cond: 6966 // result: (MULTU x y) 6967 for { 6968 _ = v.Args[1] 6969 x := v.Args[0] 6970 y := v.Args[1] 6971 v.reset(OpMIPSMULTU) 6972 v.AddArg(x) 6973 v.AddArg(y) 6974 return true 6975 } 6976 } 6977 func rewriteValueMIPS_OpMul64F_0(v *Value) bool { 6978 // match: (Mul64F x y) 6979 // cond: 6980 // result: (MULD x y) 6981 for { 6982 _ = v.Args[1] 6983 x := v.Args[0] 6984 y := v.Args[1] 6985 v.reset(OpMIPSMULD) 6986 v.AddArg(x) 6987 v.AddArg(y) 6988 return true 6989 } 6990 } 6991 func rewriteValueMIPS_OpMul8_0(v *Value) bool { 6992 // match: (Mul8 x y) 6993 // cond: 6994 // result: (MUL x y) 6995 for { 6996 _ = v.Args[1] 6997 x := v.Args[0] 6998 y := v.Args[1] 6999 v.reset(OpMIPSMUL) 7000 v.AddArg(x) 7001 v.AddArg(y) 7002 return true 7003 } 7004 } 7005 func rewriteValueMIPS_OpNeg16_0(v *Value) bool { 7006 // match: (Neg16 x) 7007 // cond: 7008 // result: (NEG x) 7009 for { 7010 x := v.Args[0] 7011 v.reset(OpMIPSNEG) 7012 v.AddArg(x) 7013 return true 7014 } 7015 } 7016 func rewriteValueMIPS_OpNeg32_0(v *Value) bool { 7017 // match: (Neg32 x) 7018 // cond: 7019 // result: (NEG x) 7020 for { 7021 x := v.Args[0] 7022 v.reset(OpMIPSNEG) 7023 v.AddArg(x) 7024 return true 7025 } 7026 } 7027 func rewriteValueMIPS_OpNeg32F_0(v *Value) bool { 7028 // match: (Neg32F x) 7029 // cond: 7030 // result: (NEGF x) 7031 for { 7032 x := v.Args[0] 7033 v.reset(OpMIPSNEGF) 7034 v.AddArg(x) 7035 return true 7036 } 7037 } 7038 func rewriteValueMIPS_OpNeg64F_0(v *Value) bool { 7039 // match: (Neg64F x) 7040 // cond: 7041 // result: (NEGD x) 7042 for { 7043 x := v.Args[0] 7044 v.reset(OpMIPSNEGD) 7045 v.AddArg(x) 7046 return 
true 7047 } 7048 } 7049 func rewriteValueMIPS_OpNeg8_0(v *Value) bool { 7050 // match: (Neg8 x) 7051 // cond: 7052 // result: (NEG x) 7053 for { 7054 x := v.Args[0] 7055 v.reset(OpMIPSNEG) 7056 v.AddArg(x) 7057 return true 7058 } 7059 } 7060 func rewriteValueMIPS_OpNeq16_0(v *Value) bool { 7061 b := v.Block 7062 _ = b 7063 typ := &b.Func.Config.Types 7064 _ = typ 7065 // match: (Neq16 x y) 7066 // cond: 7067 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to32 y)) (MOVWconst [0])) 7068 for { 7069 _ = v.Args[1] 7070 x := v.Args[0] 7071 y := v.Args[1] 7072 v.reset(OpMIPSSGTU) 7073 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 7074 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7075 v1.AddArg(x) 7076 v0.AddArg(v1) 7077 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7078 v2.AddArg(y) 7079 v0.AddArg(v2) 7080 v.AddArg(v0) 7081 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7082 v3.AuxInt = 0 7083 v.AddArg(v3) 7084 return true 7085 } 7086 } 7087 func rewriteValueMIPS_OpNeq32_0(v *Value) bool { 7088 b := v.Block 7089 _ = b 7090 typ := &b.Func.Config.Types 7091 _ = typ 7092 // match: (Neq32 x y) 7093 // cond: 7094 // result: (SGTU (XOR x y) (MOVWconst [0])) 7095 for { 7096 _ = v.Args[1] 7097 x := v.Args[0] 7098 y := v.Args[1] 7099 v.reset(OpMIPSSGTU) 7100 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 7101 v0.AddArg(x) 7102 v0.AddArg(y) 7103 v.AddArg(v0) 7104 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7105 v1.AuxInt = 0 7106 v.AddArg(v1) 7107 return true 7108 } 7109 } 7110 func rewriteValueMIPS_OpNeq32F_0(v *Value) bool { 7111 b := v.Block 7112 _ = b 7113 // match: (Neq32F x y) 7114 // cond: 7115 // result: (FPFlagFalse (CMPEQF x y)) 7116 for { 7117 _ = v.Args[1] 7118 x := v.Args[0] 7119 y := v.Args[1] 7120 v.reset(OpMIPSFPFlagFalse) 7121 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQF, types.TypeFlags) 7122 v0.AddArg(x) 7123 v0.AddArg(y) 7124 v.AddArg(v0) 7125 return true 7126 } 7127 } 7128 func rewriteValueMIPS_OpNeq64F_0(v *Value) bool { 7129 b 
:= v.Block 7130 _ = b 7131 // match: (Neq64F x y) 7132 // cond: 7133 // result: (FPFlagFalse (CMPEQD x y)) 7134 for { 7135 _ = v.Args[1] 7136 x := v.Args[0] 7137 y := v.Args[1] 7138 v.reset(OpMIPSFPFlagFalse) 7139 v0 := b.NewValue0(v.Pos, OpMIPSCMPEQD, types.TypeFlags) 7140 v0.AddArg(x) 7141 v0.AddArg(y) 7142 v.AddArg(v0) 7143 return true 7144 } 7145 } 7146 func rewriteValueMIPS_OpNeq8_0(v *Value) bool { 7147 b := v.Block 7148 _ = b 7149 typ := &b.Func.Config.Types 7150 _ = typ 7151 // match: (Neq8 x y) 7152 // cond: 7153 // result: (SGTU (XOR (ZeroExt8to32 x) (ZeroExt8to32 y)) (MOVWconst [0])) 7154 for { 7155 _ = v.Args[1] 7156 x := v.Args[0] 7157 y := v.Args[1] 7158 v.reset(OpMIPSSGTU) 7159 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 7160 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7161 v1.AddArg(x) 7162 v0.AddArg(v1) 7163 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7164 v2.AddArg(y) 7165 v0.AddArg(v2) 7166 v.AddArg(v0) 7167 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7168 v3.AuxInt = 0 7169 v.AddArg(v3) 7170 return true 7171 } 7172 } 7173 func rewriteValueMIPS_OpNeqB_0(v *Value) bool { 7174 // match: (NeqB x y) 7175 // cond: 7176 // result: (XOR x y) 7177 for { 7178 _ = v.Args[1] 7179 x := v.Args[0] 7180 y := v.Args[1] 7181 v.reset(OpMIPSXOR) 7182 v.AddArg(x) 7183 v.AddArg(y) 7184 return true 7185 } 7186 } 7187 func rewriteValueMIPS_OpNeqPtr_0(v *Value) bool { 7188 b := v.Block 7189 _ = b 7190 typ := &b.Func.Config.Types 7191 _ = typ 7192 // match: (NeqPtr x y) 7193 // cond: 7194 // result: (SGTU (XOR x y) (MOVWconst [0])) 7195 for { 7196 _ = v.Args[1] 7197 x := v.Args[0] 7198 y := v.Args[1] 7199 v.reset(OpMIPSSGTU) 7200 v0 := b.NewValue0(v.Pos, OpMIPSXOR, typ.UInt32) 7201 v0.AddArg(x) 7202 v0.AddArg(y) 7203 v.AddArg(v0) 7204 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7205 v1.AuxInt = 0 7206 v.AddArg(v1) 7207 return true 7208 } 7209 } 7210 func rewriteValueMIPS_OpNilCheck_0(v *Value) bool { 7211 // match: (NilCheck ptr 
mem) 7212 // cond: 7213 // result: (LoweredNilCheck ptr mem) 7214 for { 7215 _ = v.Args[1] 7216 ptr := v.Args[0] 7217 mem := v.Args[1] 7218 v.reset(OpMIPSLoweredNilCheck) 7219 v.AddArg(ptr) 7220 v.AddArg(mem) 7221 return true 7222 } 7223 } 7224 func rewriteValueMIPS_OpNot_0(v *Value) bool { 7225 // match: (Not x) 7226 // cond: 7227 // result: (XORconst [1] x) 7228 for { 7229 x := v.Args[0] 7230 v.reset(OpMIPSXORconst) 7231 v.AuxInt = 1 7232 v.AddArg(x) 7233 return true 7234 } 7235 } 7236 func rewriteValueMIPS_OpOffPtr_0(v *Value) bool { 7237 // match: (OffPtr [off] ptr:(SP)) 7238 // cond: 7239 // result: (MOVWaddr [off] ptr) 7240 for { 7241 off := v.AuxInt 7242 ptr := v.Args[0] 7243 if ptr.Op != OpSP { 7244 break 7245 } 7246 v.reset(OpMIPSMOVWaddr) 7247 v.AuxInt = off 7248 v.AddArg(ptr) 7249 return true 7250 } 7251 // match: (OffPtr [off] ptr) 7252 // cond: 7253 // result: (ADDconst [off] ptr) 7254 for { 7255 off := v.AuxInt 7256 ptr := v.Args[0] 7257 v.reset(OpMIPSADDconst) 7258 v.AuxInt = off 7259 v.AddArg(ptr) 7260 return true 7261 } 7262 } 7263 func rewriteValueMIPS_OpOr16_0(v *Value) bool { 7264 // match: (Or16 x y) 7265 // cond: 7266 // result: (OR x y) 7267 for { 7268 _ = v.Args[1] 7269 x := v.Args[0] 7270 y := v.Args[1] 7271 v.reset(OpMIPSOR) 7272 v.AddArg(x) 7273 v.AddArg(y) 7274 return true 7275 } 7276 } 7277 func rewriteValueMIPS_OpOr32_0(v *Value) bool { 7278 // match: (Or32 x y) 7279 // cond: 7280 // result: (OR x y) 7281 for { 7282 _ = v.Args[1] 7283 x := v.Args[0] 7284 y := v.Args[1] 7285 v.reset(OpMIPSOR) 7286 v.AddArg(x) 7287 v.AddArg(y) 7288 return true 7289 } 7290 } 7291 func rewriteValueMIPS_OpOr8_0(v *Value) bool { 7292 // match: (Or8 x y) 7293 // cond: 7294 // result: (OR x y) 7295 for { 7296 _ = v.Args[1] 7297 x := v.Args[0] 7298 y := v.Args[1] 7299 v.reset(OpMIPSOR) 7300 v.AddArg(x) 7301 v.AddArg(y) 7302 return true 7303 } 7304 } 7305 func rewriteValueMIPS_OpOrB_0(v *Value) bool { 7306 // match: (OrB x y) 7307 // cond: 7308 // result: (OR x 
y) 7309 for { 7310 _ = v.Args[1] 7311 x := v.Args[0] 7312 y := v.Args[1] 7313 v.reset(OpMIPSOR) 7314 v.AddArg(x) 7315 v.AddArg(y) 7316 return true 7317 } 7318 } 7319 func rewriteValueMIPS_OpRound32F_0(v *Value) bool { 7320 // match: (Round32F x) 7321 // cond: 7322 // result: x 7323 for { 7324 x := v.Args[0] 7325 v.reset(OpCopy) 7326 v.Type = x.Type 7327 v.AddArg(x) 7328 return true 7329 } 7330 } 7331 func rewriteValueMIPS_OpRound64F_0(v *Value) bool { 7332 // match: (Round64F x) 7333 // cond: 7334 // result: x 7335 for { 7336 x := v.Args[0] 7337 v.reset(OpCopy) 7338 v.Type = x.Type 7339 v.AddArg(x) 7340 return true 7341 } 7342 } 7343 func rewriteValueMIPS_OpRsh16Ux16_0(v *Value) bool { 7344 b := v.Block 7345 _ = b 7346 typ := &b.Func.Config.Types 7347 _ = typ 7348 // match: (Rsh16Ux16 <t> x y) 7349 // cond: 7350 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 7351 for { 7352 t := v.Type 7353 _ = v.Args[1] 7354 x := v.Args[0] 7355 y := v.Args[1] 7356 v.reset(OpMIPSCMOVZ) 7357 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7358 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7359 v1.AddArg(x) 7360 v0.AddArg(v1) 7361 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7362 v2.AddArg(y) 7363 v0.AddArg(v2) 7364 v.AddArg(v0) 7365 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7366 v3.AuxInt = 0 7367 v.AddArg(v3) 7368 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7369 v4.AuxInt = 32 7370 v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7371 v5.AddArg(y) 7372 v4.AddArg(v5) 7373 v.AddArg(v4) 7374 return true 7375 } 7376 } 7377 func rewriteValueMIPS_OpRsh16Ux32_0(v *Value) bool { 7378 b := v.Block 7379 _ = b 7380 typ := &b.Func.Config.Types 7381 _ = typ 7382 // match: (Rsh16Ux32 <t> x y) 7383 // cond: 7384 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) y) (MOVWconst [0]) (SGTUconst [32] y)) 7385 for { 7386 t := v.Type 7387 _ = v.Args[1] 7388 x := v.Args[0] 7389 y := v.Args[1] 7390 
v.reset(OpMIPSCMOVZ) 7391 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7392 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7393 v1.AddArg(x) 7394 v0.AddArg(v1) 7395 v0.AddArg(y) 7396 v.AddArg(v0) 7397 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7398 v2.AuxInt = 0 7399 v.AddArg(v2) 7400 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7401 v3.AuxInt = 32 7402 v3.AddArg(y) 7403 v.AddArg(v3) 7404 return true 7405 } 7406 } 7407 func rewriteValueMIPS_OpRsh16Ux64_0(v *Value) bool { 7408 b := v.Block 7409 _ = b 7410 typ := &b.Func.Config.Types 7411 _ = typ 7412 // match: (Rsh16Ux64 x (Const64 [c])) 7413 // cond: uint32(c) < 16 7414 // result: (SRLconst (SLLconst <typ.UInt32> x [16]) [c+16]) 7415 for { 7416 _ = v.Args[1] 7417 x := v.Args[0] 7418 v_1 := v.Args[1] 7419 if v_1.Op != OpConst64 { 7420 break 7421 } 7422 c := v_1.AuxInt 7423 if !(uint32(c) < 16) { 7424 break 7425 } 7426 v.reset(OpMIPSSRLconst) 7427 v.AuxInt = c + 16 7428 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 7429 v0.AuxInt = 16 7430 v0.AddArg(x) 7431 v.AddArg(v0) 7432 return true 7433 } 7434 // match: (Rsh16Ux64 _ (Const64 [c])) 7435 // cond: uint32(c) >= 16 7436 // result: (MOVWconst [0]) 7437 for { 7438 _ = v.Args[1] 7439 v_1 := v.Args[1] 7440 if v_1.Op != OpConst64 { 7441 break 7442 } 7443 c := v_1.AuxInt 7444 if !(uint32(c) >= 16) { 7445 break 7446 } 7447 v.reset(OpMIPSMOVWconst) 7448 v.AuxInt = 0 7449 return true 7450 } 7451 return false 7452 } 7453 func rewriteValueMIPS_OpRsh16Ux8_0(v *Value) bool { 7454 b := v.Block 7455 _ = b 7456 typ := &b.Func.Config.Types 7457 _ = typ 7458 // match: (Rsh16Ux8 <t> x y) 7459 // cond: 7460 // result: (CMOVZ (SRL <t> (ZeroExt16to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 7461 for { 7462 t := v.Type 7463 _ = v.Args[1] 7464 x := v.Args[0] 7465 y := v.Args[1] 7466 v.reset(OpMIPSCMOVZ) 7467 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7468 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7469 v1.AddArg(x) 7470 
v0.AddArg(v1) 7471 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7472 v2.AddArg(y) 7473 v0.AddArg(v2) 7474 v.AddArg(v0) 7475 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7476 v3.AuxInt = 0 7477 v.AddArg(v3) 7478 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7479 v4.AuxInt = 32 7480 v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7481 v5.AddArg(y) 7482 v4.AddArg(v5) 7483 v.AddArg(v4) 7484 return true 7485 } 7486 } 7487 func rewriteValueMIPS_OpRsh16x16_0(v *Value) bool { 7488 b := v.Block 7489 _ = b 7490 typ := &b.Func.Config.Types 7491 _ = typ 7492 // match: (Rsh16x16 x y) 7493 // cond: 7494 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y)))) 7495 for { 7496 _ = v.Args[1] 7497 x := v.Args[0] 7498 y := v.Args[1] 7499 v.reset(OpMIPSSRA) 7500 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 7501 v0.AddArg(x) 7502 v.AddArg(v0) 7503 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7504 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7505 v2.AddArg(y) 7506 v1.AddArg(v2) 7507 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7508 v3.AuxInt = -1 7509 v1.AddArg(v3) 7510 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7511 v4.AuxInt = 32 7512 v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7513 v5.AddArg(y) 7514 v4.AddArg(v5) 7515 v1.AddArg(v4) 7516 v.AddArg(v1) 7517 return true 7518 } 7519 } 7520 func rewriteValueMIPS_OpRsh16x32_0(v *Value) bool { 7521 b := v.Block 7522 _ = b 7523 typ := &b.Func.Config.Types 7524 _ = typ 7525 // match: (Rsh16x32 x y) 7526 // cond: 7527 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y))) 7528 for { 7529 _ = v.Args[1] 7530 x := v.Args[0] 7531 y := v.Args[1] 7532 v.reset(OpMIPSSRA) 7533 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 7534 v0.AddArg(x) 7535 v.AddArg(v0) 7536 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7537 v1.AddArg(y) 7538 v2 := b.NewValue0(v.Pos, 
OpMIPSMOVWconst, typ.UInt32) 7539 v2.AuxInt = -1 7540 v1.AddArg(v2) 7541 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7542 v3.AuxInt = 32 7543 v3.AddArg(y) 7544 v1.AddArg(v3) 7545 v.AddArg(v1) 7546 return true 7547 } 7548 } 7549 func rewriteValueMIPS_OpRsh16x64_0(v *Value) bool { 7550 b := v.Block 7551 _ = b 7552 typ := &b.Func.Config.Types 7553 _ = typ 7554 // match: (Rsh16x64 x (Const64 [c])) 7555 // cond: uint32(c) < 16 7556 // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [c+16]) 7557 for { 7558 _ = v.Args[1] 7559 x := v.Args[0] 7560 v_1 := v.Args[1] 7561 if v_1.Op != OpConst64 { 7562 break 7563 } 7564 c := v_1.AuxInt 7565 if !(uint32(c) < 16) { 7566 break 7567 } 7568 v.reset(OpMIPSSRAconst) 7569 v.AuxInt = c + 16 7570 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 7571 v0.AuxInt = 16 7572 v0.AddArg(x) 7573 v.AddArg(v0) 7574 return true 7575 } 7576 // match: (Rsh16x64 x (Const64 [c])) 7577 // cond: uint32(c) >= 16 7578 // result: (SRAconst (SLLconst <typ.UInt32> x [16]) [31]) 7579 for { 7580 _ = v.Args[1] 7581 x := v.Args[0] 7582 v_1 := v.Args[1] 7583 if v_1.Op != OpConst64 { 7584 break 7585 } 7586 c := v_1.AuxInt 7587 if !(uint32(c) >= 16) { 7588 break 7589 } 7590 v.reset(OpMIPSSRAconst) 7591 v.AuxInt = 31 7592 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 7593 v0.AuxInt = 16 7594 v0.AddArg(x) 7595 v.AddArg(v0) 7596 return true 7597 } 7598 return false 7599 } 7600 func rewriteValueMIPS_OpRsh16x8_0(v *Value) bool { 7601 b := v.Block 7602 _ = b 7603 typ := &b.Func.Config.Types 7604 _ = typ 7605 // match: (Rsh16x8 x y) 7606 // cond: 7607 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y)))) 7608 for { 7609 _ = v.Args[1] 7610 x := v.Args[0] 7611 y := v.Args[1] 7612 v.reset(OpMIPSSRA) 7613 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 7614 v0.AddArg(x) 7615 v.AddArg(v0) 7616 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7617 v2 := b.NewValue0(v.Pos, 
OpZeroExt8to32, typ.UInt32) 7618 v2.AddArg(y) 7619 v1.AddArg(v2) 7620 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7621 v3.AuxInt = -1 7622 v1.AddArg(v3) 7623 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7624 v4.AuxInt = 32 7625 v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7626 v5.AddArg(y) 7627 v4.AddArg(v5) 7628 v1.AddArg(v4) 7629 v.AddArg(v1) 7630 return true 7631 } 7632 } 7633 func rewriteValueMIPS_OpRsh32Ux16_0(v *Value) bool { 7634 b := v.Block 7635 _ = b 7636 typ := &b.Func.Config.Types 7637 _ = typ 7638 // match: (Rsh32Ux16 <t> x y) 7639 // cond: 7640 // result: (CMOVZ (SRL <t> x (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 7641 for { 7642 t := v.Type 7643 _ = v.Args[1] 7644 x := v.Args[0] 7645 y := v.Args[1] 7646 v.reset(OpMIPSCMOVZ) 7647 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7648 v0.AddArg(x) 7649 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7650 v1.AddArg(y) 7651 v0.AddArg(v1) 7652 v.AddArg(v0) 7653 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7654 v2.AuxInt = 0 7655 v.AddArg(v2) 7656 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7657 v3.AuxInt = 32 7658 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7659 v4.AddArg(y) 7660 v3.AddArg(v4) 7661 v.AddArg(v3) 7662 return true 7663 } 7664 } 7665 func rewriteValueMIPS_OpRsh32Ux32_0(v *Value) bool { 7666 b := v.Block 7667 _ = b 7668 typ := &b.Func.Config.Types 7669 _ = typ 7670 // match: (Rsh32Ux32 <t> x y) 7671 // cond: 7672 // result: (CMOVZ (SRL <t> x y) (MOVWconst [0]) (SGTUconst [32] y)) 7673 for { 7674 t := v.Type 7675 _ = v.Args[1] 7676 x := v.Args[0] 7677 y := v.Args[1] 7678 v.reset(OpMIPSCMOVZ) 7679 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7680 v0.AddArg(x) 7681 v0.AddArg(y) 7682 v.AddArg(v0) 7683 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7684 v1.AuxInt = 0 7685 v.AddArg(v1) 7686 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7687 v2.AuxInt = 32 7688 v2.AddArg(y) 7689 v.AddArg(v2) 7690 return true 7691 } 
7692 } 7693 func rewriteValueMIPS_OpRsh32Ux64_0(v *Value) bool { 7694 // match: (Rsh32Ux64 x (Const64 [c])) 7695 // cond: uint32(c) < 32 7696 // result: (SRLconst x [c]) 7697 for { 7698 _ = v.Args[1] 7699 x := v.Args[0] 7700 v_1 := v.Args[1] 7701 if v_1.Op != OpConst64 { 7702 break 7703 } 7704 c := v_1.AuxInt 7705 if !(uint32(c) < 32) { 7706 break 7707 } 7708 v.reset(OpMIPSSRLconst) 7709 v.AuxInt = c 7710 v.AddArg(x) 7711 return true 7712 } 7713 // match: (Rsh32Ux64 _ (Const64 [c])) 7714 // cond: uint32(c) >= 32 7715 // result: (MOVWconst [0]) 7716 for { 7717 _ = v.Args[1] 7718 v_1 := v.Args[1] 7719 if v_1.Op != OpConst64 { 7720 break 7721 } 7722 c := v_1.AuxInt 7723 if !(uint32(c) >= 32) { 7724 break 7725 } 7726 v.reset(OpMIPSMOVWconst) 7727 v.AuxInt = 0 7728 return true 7729 } 7730 return false 7731 } 7732 func rewriteValueMIPS_OpRsh32Ux8_0(v *Value) bool { 7733 b := v.Block 7734 _ = b 7735 typ := &b.Func.Config.Types 7736 _ = typ 7737 // match: (Rsh32Ux8 <t> x y) 7738 // cond: 7739 // result: (CMOVZ (SRL <t> x (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 7740 for { 7741 t := v.Type 7742 _ = v.Args[1] 7743 x := v.Args[0] 7744 y := v.Args[1] 7745 v.reset(OpMIPSCMOVZ) 7746 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7747 v0.AddArg(x) 7748 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7749 v1.AddArg(y) 7750 v0.AddArg(v1) 7751 v.AddArg(v0) 7752 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7753 v2.AuxInt = 0 7754 v.AddArg(v2) 7755 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7756 v3.AuxInt = 32 7757 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7758 v4.AddArg(y) 7759 v3.AddArg(v4) 7760 v.AddArg(v3) 7761 return true 7762 } 7763 } 7764 func rewriteValueMIPS_OpRsh32x16_0(v *Value) bool { 7765 b := v.Block 7766 _ = b 7767 typ := &b.Func.Config.Types 7768 _ = typ 7769 // match: (Rsh32x16 x y) 7770 // cond: 7771 // result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y)))) 
7772 for { 7773 _ = v.Args[1] 7774 x := v.Args[0] 7775 y := v.Args[1] 7776 v.reset(OpMIPSSRA) 7777 v.AddArg(x) 7778 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7779 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7780 v1.AddArg(y) 7781 v0.AddArg(v1) 7782 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7783 v2.AuxInt = -1 7784 v0.AddArg(v2) 7785 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7786 v3.AuxInt = 32 7787 v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7788 v4.AddArg(y) 7789 v3.AddArg(v4) 7790 v0.AddArg(v3) 7791 v.AddArg(v0) 7792 return true 7793 } 7794 } 7795 func rewriteValueMIPS_OpRsh32x32_0(v *Value) bool { 7796 b := v.Block 7797 _ = b 7798 typ := &b.Func.Config.Types 7799 _ = typ 7800 // match: (Rsh32x32 x y) 7801 // cond: 7802 // result: (SRA x ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y))) 7803 for { 7804 _ = v.Args[1] 7805 x := v.Args[0] 7806 y := v.Args[1] 7807 v.reset(OpMIPSSRA) 7808 v.AddArg(x) 7809 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7810 v0.AddArg(y) 7811 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7812 v1.AuxInt = -1 7813 v0.AddArg(v1) 7814 v2 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7815 v2.AuxInt = 32 7816 v2.AddArg(y) 7817 v0.AddArg(v2) 7818 v.AddArg(v0) 7819 return true 7820 } 7821 } 7822 func rewriteValueMIPS_OpRsh32x64_0(v *Value) bool { 7823 // match: (Rsh32x64 x (Const64 [c])) 7824 // cond: uint32(c) < 32 7825 // result: (SRAconst x [c]) 7826 for { 7827 _ = v.Args[1] 7828 x := v.Args[0] 7829 v_1 := v.Args[1] 7830 if v_1.Op != OpConst64 { 7831 break 7832 } 7833 c := v_1.AuxInt 7834 if !(uint32(c) < 32) { 7835 break 7836 } 7837 v.reset(OpMIPSSRAconst) 7838 v.AuxInt = c 7839 v.AddArg(x) 7840 return true 7841 } 7842 // match: (Rsh32x64 x (Const64 [c])) 7843 // cond: uint32(c) >= 32 7844 // result: (SRAconst x [31]) 7845 for { 7846 _ = v.Args[1] 7847 x := v.Args[0] 7848 v_1 := v.Args[1] 7849 if v_1.Op != OpConst64 { 7850 break 7851 } 7852 c := v_1.AuxInt 7853 if 
!(uint32(c) >= 32) { 7854 break 7855 } 7856 v.reset(OpMIPSSRAconst) 7857 v.AuxInt = 31 7858 v.AddArg(x) 7859 return true 7860 } 7861 return false 7862 } 7863 func rewriteValueMIPS_OpRsh32x8_0(v *Value) bool { 7864 b := v.Block 7865 _ = b 7866 typ := &b.Func.Config.Types 7867 _ = typ 7868 // match: (Rsh32x8 x y) 7869 // cond: 7870 // result: (SRA x ( CMOVZ <typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y)))) 7871 for { 7872 _ = v.Args[1] 7873 x := v.Args[0] 7874 y := v.Args[1] 7875 v.reset(OpMIPSSRA) 7876 v.AddArg(x) 7877 v0 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 7878 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7879 v1.AddArg(y) 7880 v0.AddArg(v1) 7881 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7882 v2.AuxInt = -1 7883 v0.AddArg(v2) 7884 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7885 v3.AuxInt = 32 7886 v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7887 v4.AddArg(y) 7888 v3.AddArg(v4) 7889 v0.AddArg(v3) 7890 v.AddArg(v0) 7891 return true 7892 } 7893 } 7894 func rewriteValueMIPS_OpRsh8Ux16_0(v *Value) bool { 7895 b := v.Block 7896 _ = b 7897 typ := &b.Func.Config.Types 7898 _ = typ 7899 // match: (Rsh8Ux16 <t> x y) 7900 // cond: 7901 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt16to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt16to32 y))) 7902 for { 7903 t := v.Type 7904 _ = v.Args[1] 7905 x := v.Args[0] 7906 y := v.Args[1] 7907 v.reset(OpMIPSCMOVZ) 7908 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7909 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7910 v1.AddArg(x) 7911 v0.AddArg(v1) 7912 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7913 v2.AddArg(y) 7914 v0.AddArg(v2) 7915 v.AddArg(v0) 7916 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7917 v3.AuxInt = 0 7918 v.AddArg(v3) 7919 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7920 v4.AuxInt = 32 7921 v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7922 v5.AddArg(y) 7923 v4.AddArg(v5) 7924 v.AddArg(v4) 7925 
return true 7926 } 7927 } 7928 func rewriteValueMIPS_OpRsh8Ux32_0(v *Value) bool { 7929 b := v.Block 7930 _ = b 7931 typ := &b.Func.Config.Types 7932 _ = typ 7933 // match: (Rsh8Ux32 <t> x y) 7934 // cond: 7935 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) y) (MOVWconst [0]) (SGTUconst [32] y)) 7936 for { 7937 t := v.Type 7938 _ = v.Args[1] 7939 x := v.Args[0] 7940 y := v.Args[1] 7941 v.reset(OpMIPSCMOVZ) 7942 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 7943 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 7944 v1.AddArg(x) 7945 v0.AddArg(v1) 7946 v0.AddArg(y) 7947 v.AddArg(v0) 7948 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 7949 v2.AuxInt = 0 7950 v.AddArg(v2) 7951 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 7952 v3.AuxInt = 32 7953 v3.AddArg(y) 7954 v.AddArg(v3) 7955 return true 7956 } 7957 } 7958 func rewriteValueMIPS_OpRsh8Ux64_0(v *Value) bool { 7959 b := v.Block 7960 _ = b 7961 typ := &b.Func.Config.Types 7962 _ = typ 7963 // match: (Rsh8Ux64 x (Const64 [c])) 7964 // cond: uint32(c) < 8 7965 // result: (SRLconst (SLLconst <typ.UInt32> x [24]) [c+24]) 7966 for { 7967 _ = v.Args[1] 7968 x := v.Args[0] 7969 v_1 := v.Args[1] 7970 if v_1.Op != OpConst64 { 7971 break 7972 } 7973 c := v_1.AuxInt 7974 if !(uint32(c) < 8) { 7975 break 7976 } 7977 v.reset(OpMIPSSRLconst) 7978 v.AuxInt = c + 24 7979 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 7980 v0.AuxInt = 24 7981 v0.AddArg(x) 7982 v.AddArg(v0) 7983 return true 7984 } 7985 // match: (Rsh8Ux64 _ (Const64 [c])) 7986 // cond: uint32(c) >= 8 7987 // result: (MOVWconst [0]) 7988 for { 7989 _ = v.Args[1] 7990 v_1 := v.Args[1] 7991 if v_1.Op != OpConst64 { 7992 break 7993 } 7994 c := v_1.AuxInt 7995 if !(uint32(c) >= 8) { 7996 break 7997 } 7998 v.reset(OpMIPSMOVWconst) 7999 v.AuxInt = 0 8000 return true 8001 } 8002 return false 8003 } 8004 func rewriteValueMIPS_OpRsh8Ux8_0(v *Value) bool { 8005 b := v.Block 8006 _ = b 8007 typ := &b.Func.Config.Types 8008 _ = typ 8009 // match: (Rsh8Ux8 <t> x y) 
8010 // cond: 8011 // result: (CMOVZ (SRL <t> (ZeroExt8to32 x) (ZeroExt8to32 y) ) (MOVWconst [0]) (SGTUconst [32] (ZeroExt8to32 y))) 8012 for { 8013 t := v.Type 8014 _ = v.Args[1] 8015 x := v.Args[0] 8016 y := v.Args[1] 8017 v.reset(OpMIPSCMOVZ) 8018 v0 := b.NewValue0(v.Pos, OpMIPSSRL, t) 8019 v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 8020 v1.AddArg(x) 8021 v0.AddArg(v1) 8022 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 8023 v2.AddArg(y) 8024 v0.AddArg(v2) 8025 v.AddArg(v0) 8026 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8027 v3.AuxInt = 0 8028 v.AddArg(v3) 8029 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 8030 v4.AuxInt = 32 8031 v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 8032 v5.AddArg(y) 8033 v4.AddArg(v5) 8034 v.AddArg(v4) 8035 return true 8036 } 8037 } 8038 func rewriteValueMIPS_OpRsh8x16_0(v *Value) bool { 8039 b := v.Block 8040 _ = b 8041 typ := &b.Func.Config.Types 8042 _ = typ 8043 // match: (Rsh8x16 x y) 8044 // cond: 8045 // result: (SRA (SignExt16to32 x) ( CMOVZ <typ.UInt32> (ZeroExt16to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt16to32 y)))) 8046 for { 8047 _ = v.Args[1] 8048 x := v.Args[0] 8049 y := v.Args[1] 8050 v.reset(OpMIPSSRA) 8051 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 8052 v0.AddArg(x) 8053 v.AddArg(v0) 8054 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 8055 v2 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 8056 v2.AddArg(y) 8057 v1.AddArg(v2) 8058 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8059 v3.AuxInt = -1 8060 v1.AddArg(v3) 8061 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 8062 v4.AuxInt = 32 8063 v5 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 8064 v5.AddArg(y) 8065 v4.AddArg(v5) 8066 v1.AddArg(v4) 8067 v.AddArg(v1) 8068 return true 8069 } 8070 } 8071 func rewriteValueMIPS_OpRsh8x32_0(v *Value) bool { 8072 b := v.Block 8073 _ = b 8074 typ := &b.Func.Config.Types 8075 _ = typ 8076 // match: (Rsh8x32 x y) 8077 // cond: 8078 // result: (SRA 
(SignExt16to32 x) ( CMOVZ <typ.UInt32> y (MOVWconst [-1]) (SGTUconst [32] y))) 8079 for { 8080 _ = v.Args[1] 8081 x := v.Args[0] 8082 y := v.Args[1] 8083 v.reset(OpMIPSSRA) 8084 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 8085 v0.AddArg(x) 8086 v.AddArg(v0) 8087 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 8088 v1.AddArg(y) 8089 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8090 v2.AuxInt = -1 8091 v1.AddArg(v2) 8092 v3 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 8093 v3.AuxInt = 32 8094 v3.AddArg(y) 8095 v1.AddArg(v3) 8096 v.AddArg(v1) 8097 return true 8098 } 8099 } 8100 func rewriteValueMIPS_OpRsh8x64_0(v *Value) bool { 8101 b := v.Block 8102 _ = b 8103 typ := &b.Func.Config.Types 8104 _ = typ 8105 // match: (Rsh8x64 x (Const64 [c])) 8106 // cond: uint32(c) < 8 8107 // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [c+24]) 8108 for { 8109 _ = v.Args[1] 8110 x := v.Args[0] 8111 v_1 := v.Args[1] 8112 if v_1.Op != OpConst64 { 8113 break 8114 } 8115 c := v_1.AuxInt 8116 if !(uint32(c) < 8) { 8117 break 8118 } 8119 v.reset(OpMIPSSRAconst) 8120 v.AuxInt = c + 24 8121 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 8122 v0.AuxInt = 24 8123 v0.AddArg(x) 8124 v.AddArg(v0) 8125 return true 8126 } 8127 // match: (Rsh8x64 x (Const64 [c])) 8128 // cond: uint32(c) >= 8 8129 // result: (SRAconst (SLLconst <typ.UInt32> x [24]) [31]) 8130 for { 8131 _ = v.Args[1] 8132 x := v.Args[0] 8133 v_1 := v.Args[1] 8134 if v_1.Op != OpConst64 { 8135 break 8136 } 8137 c := v_1.AuxInt 8138 if !(uint32(c) >= 8) { 8139 break 8140 } 8141 v.reset(OpMIPSSRAconst) 8142 v.AuxInt = 31 8143 v0 := b.NewValue0(v.Pos, OpMIPSSLLconst, typ.UInt32) 8144 v0.AuxInt = 24 8145 v0.AddArg(x) 8146 v.AddArg(v0) 8147 return true 8148 } 8149 return false 8150 } 8151 func rewriteValueMIPS_OpRsh8x8_0(v *Value) bool { 8152 b := v.Block 8153 _ = b 8154 typ := &b.Func.Config.Types 8155 _ = typ 8156 // match: (Rsh8x8 x y) 8157 // cond: 8158 // result: (SRA (SignExt16to32 x) ( CMOVZ 
<typ.UInt32> (ZeroExt8to32 y) (MOVWconst [-1]) (SGTUconst [32] (ZeroExt8to32 y)))) 8159 for { 8160 _ = v.Args[1] 8161 x := v.Args[0] 8162 y := v.Args[1] 8163 v.reset(OpMIPSSRA) 8164 v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32) 8165 v0.AddArg(x) 8166 v.AddArg(v0) 8167 v1 := b.NewValue0(v.Pos, OpMIPSCMOVZ, typ.UInt32) 8168 v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 8169 v2.AddArg(y) 8170 v1.AddArg(v2) 8171 v3 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8172 v3.AuxInt = -1 8173 v1.AddArg(v3) 8174 v4 := b.NewValue0(v.Pos, OpMIPSSGTUconst, typ.Bool) 8175 v4.AuxInt = 32 8176 v5 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32) 8177 v5.AddArg(y) 8178 v4.AddArg(v5) 8179 v1.AddArg(v4) 8180 v.AddArg(v1) 8181 return true 8182 } 8183 } 8184 func rewriteValueMIPS_OpSelect0_0(v *Value) bool { 8185 b := v.Block 8186 _ = b 8187 typ := &b.Func.Config.Types 8188 _ = typ 8189 // match: (Select0 (Add32carry <t> x y)) 8190 // cond: 8191 // result: (ADD <t.FieldType(0)> x y) 8192 for { 8193 v_0 := v.Args[0] 8194 if v_0.Op != OpAdd32carry { 8195 break 8196 } 8197 t := v_0.Type 8198 _ = v_0.Args[1] 8199 x := v_0.Args[0] 8200 y := v_0.Args[1] 8201 v.reset(OpMIPSADD) 8202 v.Type = t.FieldType(0) 8203 v.AddArg(x) 8204 v.AddArg(y) 8205 return true 8206 } 8207 // match: (Select0 (Sub32carry <t> x y)) 8208 // cond: 8209 // result: (SUB <t.FieldType(0)> x y) 8210 for { 8211 v_0 := v.Args[0] 8212 if v_0.Op != OpSub32carry { 8213 break 8214 } 8215 t := v_0.Type 8216 _ = v_0.Args[1] 8217 x := v_0.Args[0] 8218 y := v_0.Args[1] 8219 v.reset(OpMIPSSUB) 8220 v.Type = t.FieldType(0) 8221 v.AddArg(x) 8222 v.AddArg(y) 8223 return true 8224 } 8225 // match: (Select0 (MULTU (MOVWconst [0]) _)) 8226 // cond: 8227 // result: (MOVWconst [0]) 8228 for { 8229 v_0 := v.Args[0] 8230 if v_0.Op != OpMIPSMULTU { 8231 break 8232 } 8233 _ = v_0.Args[1] 8234 v_0_0 := v_0.Args[0] 8235 if v_0_0.Op != OpMIPSMOVWconst { 8236 break 8237 } 8238 if v_0_0.AuxInt != 0 { 8239 break 8240 } 8241 
v.reset(OpMIPSMOVWconst) 8242 v.AuxInt = 0 8243 return true 8244 } 8245 // match: (Select0 (MULTU _ (MOVWconst [0]))) 8246 // cond: 8247 // result: (MOVWconst [0]) 8248 for { 8249 v_0 := v.Args[0] 8250 if v_0.Op != OpMIPSMULTU { 8251 break 8252 } 8253 _ = v_0.Args[1] 8254 v_0_1 := v_0.Args[1] 8255 if v_0_1.Op != OpMIPSMOVWconst { 8256 break 8257 } 8258 if v_0_1.AuxInt != 0 { 8259 break 8260 } 8261 v.reset(OpMIPSMOVWconst) 8262 v.AuxInt = 0 8263 return true 8264 } 8265 // match: (Select0 (MULTU (MOVWconst [1]) _)) 8266 // cond: 8267 // result: (MOVWconst [0]) 8268 for { 8269 v_0 := v.Args[0] 8270 if v_0.Op != OpMIPSMULTU { 8271 break 8272 } 8273 _ = v_0.Args[1] 8274 v_0_0 := v_0.Args[0] 8275 if v_0_0.Op != OpMIPSMOVWconst { 8276 break 8277 } 8278 if v_0_0.AuxInt != 1 { 8279 break 8280 } 8281 v.reset(OpMIPSMOVWconst) 8282 v.AuxInt = 0 8283 return true 8284 } 8285 // match: (Select0 (MULTU _ (MOVWconst [1]))) 8286 // cond: 8287 // result: (MOVWconst [0]) 8288 for { 8289 v_0 := v.Args[0] 8290 if v_0.Op != OpMIPSMULTU { 8291 break 8292 } 8293 _ = v_0.Args[1] 8294 v_0_1 := v_0.Args[1] 8295 if v_0_1.Op != OpMIPSMOVWconst { 8296 break 8297 } 8298 if v_0_1.AuxInt != 1 { 8299 break 8300 } 8301 v.reset(OpMIPSMOVWconst) 8302 v.AuxInt = 0 8303 return true 8304 } 8305 // match: (Select0 (MULTU (MOVWconst [-1]) x)) 8306 // cond: 8307 // result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x) 8308 for { 8309 v_0 := v.Args[0] 8310 if v_0.Op != OpMIPSMULTU { 8311 break 8312 } 8313 _ = v_0.Args[1] 8314 v_0_0 := v_0.Args[0] 8315 if v_0_0.Op != OpMIPSMOVWconst { 8316 break 8317 } 8318 if v_0_0.AuxInt != -1 { 8319 break 8320 } 8321 x := v_0.Args[1] 8322 v.reset(OpMIPSCMOVZ) 8323 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type) 8324 v0.AuxInt = -1 8325 v0.AddArg(x) 8326 v.AddArg(v0) 8327 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8328 v1.AuxInt = 0 8329 v.AddArg(v1) 8330 v.AddArg(x) 8331 return true 8332 } 8333 // match: (Select0 (MULTU x (MOVWconst [-1]))) 8334 // cond: 
8335 // result: (CMOVZ (ADDconst <x.Type> [-1] x) (MOVWconst [0]) x) 8336 for { 8337 v_0 := v.Args[0] 8338 if v_0.Op != OpMIPSMULTU { 8339 break 8340 } 8341 _ = v_0.Args[1] 8342 x := v_0.Args[0] 8343 v_0_1 := v_0.Args[1] 8344 if v_0_1.Op != OpMIPSMOVWconst { 8345 break 8346 } 8347 if v_0_1.AuxInt != -1 { 8348 break 8349 } 8350 v.reset(OpMIPSCMOVZ) 8351 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, x.Type) 8352 v0.AuxInt = -1 8353 v0.AddArg(x) 8354 v.AddArg(v0) 8355 v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 8356 v1.AuxInt = 0 8357 v.AddArg(v1) 8358 v.AddArg(x) 8359 return true 8360 } 8361 // match: (Select0 (MULTU (MOVWconst [c]) x)) 8362 // cond: isPowerOfTwo(int64(uint32(c))) 8363 // result: (SRLconst [32-log2(int64(uint32(c)))] x) 8364 for { 8365 v_0 := v.Args[0] 8366 if v_0.Op != OpMIPSMULTU { 8367 break 8368 } 8369 _ = v_0.Args[1] 8370 v_0_0 := v_0.Args[0] 8371 if v_0_0.Op != OpMIPSMOVWconst { 8372 break 8373 } 8374 c := v_0_0.AuxInt 8375 x := v_0.Args[1] 8376 if !(isPowerOfTwo(int64(uint32(c)))) { 8377 break 8378 } 8379 v.reset(OpMIPSSRLconst) 8380 v.AuxInt = 32 - log2(int64(uint32(c))) 8381 v.AddArg(x) 8382 return true 8383 } 8384 // match: (Select0 (MULTU x (MOVWconst [c]))) 8385 // cond: isPowerOfTwo(int64(uint32(c))) 8386 // result: (SRLconst [32-log2(int64(uint32(c)))] x) 8387 for { 8388 v_0 := v.Args[0] 8389 if v_0.Op != OpMIPSMULTU { 8390 break 8391 } 8392 _ = v_0.Args[1] 8393 x := v_0.Args[0] 8394 v_0_1 := v_0.Args[1] 8395 if v_0_1.Op != OpMIPSMOVWconst { 8396 break 8397 } 8398 c := v_0_1.AuxInt 8399 if !(isPowerOfTwo(int64(uint32(c)))) { 8400 break 8401 } 8402 v.reset(OpMIPSSRLconst) 8403 v.AuxInt = 32 - log2(int64(uint32(c))) 8404 v.AddArg(x) 8405 return true 8406 } 8407 return false 8408 } 8409 func rewriteValueMIPS_OpSelect0_10(v *Value) bool { 8410 // match: (Select0 (MULTU (MOVWconst [c]) (MOVWconst [d]))) 8411 // cond: 8412 // result: (MOVWconst [(c*d)>>32]) 8413 for { 8414 v_0 := v.Args[0] 8415 if v_0.Op != OpMIPSMULTU { 8416 break 8417 
} 8418 _ = v_0.Args[1] 8419 v_0_0 := v_0.Args[0] 8420 if v_0_0.Op != OpMIPSMOVWconst { 8421 break 8422 } 8423 c := v_0_0.AuxInt 8424 v_0_1 := v_0.Args[1] 8425 if v_0_1.Op != OpMIPSMOVWconst { 8426 break 8427 } 8428 d := v_0_1.AuxInt 8429 v.reset(OpMIPSMOVWconst) 8430 v.AuxInt = (c * d) >> 32 8431 return true 8432 } 8433 // match: (Select0 (MULTU (MOVWconst [d]) (MOVWconst [c]))) 8434 // cond: 8435 // result: (MOVWconst [(c*d)>>32]) 8436 for { 8437 v_0 := v.Args[0] 8438 if v_0.Op != OpMIPSMULTU { 8439 break 8440 } 8441 _ = v_0.Args[1] 8442 v_0_0 := v_0.Args[0] 8443 if v_0_0.Op != OpMIPSMOVWconst { 8444 break 8445 } 8446 d := v_0_0.AuxInt 8447 v_0_1 := v_0.Args[1] 8448 if v_0_1.Op != OpMIPSMOVWconst { 8449 break 8450 } 8451 c := v_0_1.AuxInt 8452 v.reset(OpMIPSMOVWconst) 8453 v.AuxInt = (c * d) >> 32 8454 return true 8455 } 8456 // match: (Select0 (DIV (MOVWconst [c]) (MOVWconst [d]))) 8457 // cond: 8458 // result: (MOVWconst [int64(int32(c)%int32(d))]) 8459 for { 8460 v_0 := v.Args[0] 8461 if v_0.Op != OpMIPSDIV { 8462 break 8463 } 8464 _ = v_0.Args[1] 8465 v_0_0 := v_0.Args[0] 8466 if v_0_0.Op != OpMIPSMOVWconst { 8467 break 8468 } 8469 c := v_0_0.AuxInt 8470 v_0_1 := v_0.Args[1] 8471 if v_0_1.Op != OpMIPSMOVWconst { 8472 break 8473 } 8474 d := v_0_1.AuxInt 8475 v.reset(OpMIPSMOVWconst) 8476 v.AuxInt = int64(int32(c) % int32(d)) 8477 return true 8478 } 8479 // match: (Select0 (DIVU (MOVWconst [c]) (MOVWconst [d]))) 8480 // cond: 8481 // result: (MOVWconst [int64(int32(uint32(c)%uint32(d)))]) 8482 for { 8483 v_0 := v.Args[0] 8484 if v_0.Op != OpMIPSDIVU { 8485 break 8486 } 8487 _ = v_0.Args[1] 8488 v_0_0 := v_0.Args[0] 8489 if v_0_0.Op != OpMIPSMOVWconst { 8490 break 8491 } 8492 c := v_0_0.AuxInt 8493 v_0_1 := v_0.Args[1] 8494 if v_0_1.Op != OpMIPSMOVWconst { 8495 break 8496 } 8497 d := v_0_1.AuxInt 8498 v.reset(OpMIPSMOVWconst) 8499 v.AuxInt = int64(int32(uint32(c) % uint32(d))) 8500 return true 8501 } 8502 return false 8503 } 8504 func 
rewriteValueMIPS_OpSelect1_0(v *Value) bool { 8505 b := v.Block 8506 _ = b 8507 typ := &b.Func.Config.Types 8508 _ = typ 8509 // match: (Select1 (Add32carry <t> x y)) 8510 // cond: 8511 // result: (SGTU <typ.Bool> x (ADD <t.FieldType(0)> x y)) 8512 for { 8513 v_0 := v.Args[0] 8514 if v_0.Op != OpAdd32carry { 8515 break 8516 } 8517 t := v_0.Type 8518 _ = v_0.Args[1] 8519 x := v_0.Args[0] 8520 y := v_0.Args[1] 8521 v.reset(OpMIPSSGTU) 8522 v.Type = typ.Bool 8523 v.AddArg(x) 8524 v0 := b.NewValue0(v.Pos, OpMIPSADD, t.FieldType(0)) 8525 v0.AddArg(x) 8526 v0.AddArg(y) 8527 v.AddArg(v0) 8528 return true 8529 } 8530 // match: (Select1 (Sub32carry <t> x y)) 8531 // cond: 8532 // result: (SGTU <typ.Bool> (SUB <t.FieldType(0)> x y) x) 8533 for { 8534 v_0 := v.Args[0] 8535 if v_0.Op != OpSub32carry { 8536 break 8537 } 8538 t := v_0.Type 8539 _ = v_0.Args[1] 8540 x := v_0.Args[0] 8541 y := v_0.Args[1] 8542 v.reset(OpMIPSSGTU) 8543 v.Type = typ.Bool 8544 v0 := b.NewValue0(v.Pos, OpMIPSSUB, t.FieldType(0)) 8545 v0.AddArg(x) 8546 v0.AddArg(y) 8547 v.AddArg(v0) 8548 v.AddArg(x) 8549 return true 8550 } 8551 // match: (Select1 (MULTU (MOVWconst [0]) _)) 8552 // cond: 8553 // result: (MOVWconst [0]) 8554 for { 8555 v_0 := v.Args[0] 8556 if v_0.Op != OpMIPSMULTU { 8557 break 8558 } 8559 _ = v_0.Args[1] 8560 v_0_0 := v_0.Args[0] 8561 if v_0_0.Op != OpMIPSMOVWconst { 8562 break 8563 } 8564 if v_0_0.AuxInt != 0 { 8565 break 8566 } 8567 v.reset(OpMIPSMOVWconst) 8568 v.AuxInt = 0 8569 return true 8570 } 8571 // match: (Select1 (MULTU _ (MOVWconst [0]))) 8572 // cond: 8573 // result: (MOVWconst [0]) 8574 for { 8575 v_0 := v.Args[0] 8576 if v_0.Op != OpMIPSMULTU { 8577 break 8578 } 8579 _ = v_0.Args[1] 8580 v_0_1 := v_0.Args[1] 8581 if v_0_1.Op != OpMIPSMOVWconst { 8582 break 8583 } 8584 if v_0_1.AuxInt != 0 { 8585 break 8586 } 8587 v.reset(OpMIPSMOVWconst) 8588 v.AuxInt = 0 8589 return true 8590 } 8591 // match: (Select1 (MULTU (MOVWconst [1]) x)) 8592 // cond: 8593 // result: x 8594 for { 
8595 v_0 := v.Args[0] 8596 if v_0.Op != OpMIPSMULTU { 8597 break 8598 } 8599 _ = v_0.Args[1] 8600 v_0_0 := v_0.Args[0] 8601 if v_0_0.Op != OpMIPSMOVWconst { 8602 break 8603 } 8604 if v_0_0.AuxInt != 1 { 8605 break 8606 } 8607 x := v_0.Args[1] 8608 v.reset(OpCopy) 8609 v.Type = x.Type 8610 v.AddArg(x) 8611 return true 8612 } 8613 // match: (Select1 (MULTU x (MOVWconst [1]))) 8614 // cond: 8615 // result: x 8616 for { 8617 v_0 := v.Args[0] 8618 if v_0.Op != OpMIPSMULTU { 8619 break 8620 } 8621 _ = v_0.Args[1] 8622 x := v_0.Args[0] 8623 v_0_1 := v_0.Args[1] 8624 if v_0_1.Op != OpMIPSMOVWconst { 8625 break 8626 } 8627 if v_0_1.AuxInt != 1 { 8628 break 8629 } 8630 v.reset(OpCopy) 8631 v.Type = x.Type 8632 v.AddArg(x) 8633 return true 8634 } 8635 // match: (Select1 (MULTU (MOVWconst [-1]) x)) 8636 // cond: 8637 // result: (NEG <x.Type> x) 8638 for { 8639 v_0 := v.Args[0] 8640 if v_0.Op != OpMIPSMULTU { 8641 break 8642 } 8643 _ = v_0.Args[1] 8644 v_0_0 := v_0.Args[0] 8645 if v_0_0.Op != OpMIPSMOVWconst { 8646 break 8647 } 8648 if v_0_0.AuxInt != -1 { 8649 break 8650 } 8651 x := v_0.Args[1] 8652 v.reset(OpMIPSNEG) 8653 v.Type = x.Type 8654 v.AddArg(x) 8655 return true 8656 } 8657 // match: (Select1 (MULTU x (MOVWconst [-1]))) 8658 // cond: 8659 // result: (NEG <x.Type> x) 8660 for { 8661 v_0 := v.Args[0] 8662 if v_0.Op != OpMIPSMULTU { 8663 break 8664 } 8665 _ = v_0.Args[1] 8666 x := v_0.Args[0] 8667 v_0_1 := v_0.Args[1] 8668 if v_0_1.Op != OpMIPSMOVWconst { 8669 break 8670 } 8671 if v_0_1.AuxInt != -1 { 8672 break 8673 } 8674 v.reset(OpMIPSNEG) 8675 v.Type = x.Type 8676 v.AddArg(x) 8677 return true 8678 } 8679 // match: (Select1 (MULTU (MOVWconst [c]) x)) 8680 // cond: isPowerOfTwo(int64(uint32(c))) 8681 // result: (SLLconst [log2(int64(uint32(c)))] x) 8682 for { 8683 v_0 := v.Args[0] 8684 if v_0.Op != OpMIPSMULTU { 8685 break 8686 } 8687 _ = v_0.Args[1] 8688 v_0_0 := v_0.Args[0] 8689 if v_0_0.Op != OpMIPSMOVWconst { 8690 break 8691 } 8692 c := v_0_0.AuxInt 8693 x := 
v_0.Args[1] 8694 if !(isPowerOfTwo(int64(uint32(c)))) { 8695 break 8696 } 8697 v.reset(OpMIPSSLLconst) 8698 v.AuxInt = log2(int64(uint32(c))) 8699 v.AddArg(x) 8700 return true 8701 } 8702 // match: (Select1 (MULTU x (MOVWconst [c]))) 8703 // cond: isPowerOfTwo(int64(uint32(c))) 8704 // result: (SLLconst [log2(int64(uint32(c)))] x) 8705 for { 8706 v_0 := v.Args[0] 8707 if v_0.Op != OpMIPSMULTU { 8708 break 8709 } 8710 _ = v_0.Args[1] 8711 x := v_0.Args[0] 8712 v_0_1 := v_0.Args[1] 8713 if v_0_1.Op != OpMIPSMOVWconst { 8714 break 8715 } 8716 c := v_0_1.AuxInt 8717 if !(isPowerOfTwo(int64(uint32(c)))) { 8718 break 8719 } 8720 v.reset(OpMIPSSLLconst) 8721 v.AuxInt = log2(int64(uint32(c))) 8722 v.AddArg(x) 8723 return true 8724 } 8725 return false 8726 } 8727 func rewriteValueMIPS_OpSelect1_10(v *Value) bool { 8728 // match: (Select1 (MULTU (MOVWconst [c]) (MOVWconst [d]))) 8729 // cond: 8730 // result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))]) 8731 for { 8732 v_0 := v.Args[0] 8733 if v_0.Op != OpMIPSMULTU { 8734 break 8735 } 8736 _ = v_0.Args[1] 8737 v_0_0 := v_0.Args[0] 8738 if v_0_0.Op != OpMIPSMOVWconst { 8739 break 8740 } 8741 c := v_0_0.AuxInt 8742 v_0_1 := v_0.Args[1] 8743 if v_0_1.Op != OpMIPSMOVWconst { 8744 break 8745 } 8746 d := v_0_1.AuxInt 8747 v.reset(OpMIPSMOVWconst) 8748 v.AuxInt = int64(int32(uint32(c) * uint32(d))) 8749 return true 8750 } 8751 // match: (Select1 (MULTU (MOVWconst [d]) (MOVWconst [c]))) 8752 // cond: 8753 // result: (MOVWconst [int64(int32(uint32(c)*uint32(d)))]) 8754 for { 8755 v_0 := v.Args[0] 8756 if v_0.Op != OpMIPSMULTU { 8757 break 8758 } 8759 _ = v_0.Args[1] 8760 v_0_0 := v_0.Args[0] 8761 if v_0_0.Op != OpMIPSMOVWconst { 8762 break 8763 } 8764 d := v_0_0.AuxInt 8765 v_0_1 := v_0.Args[1] 8766 if v_0_1.Op != OpMIPSMOVWconst { 8767 break 8768 } 8769 c := v_0_1.AuxInt 8770 v.reset(OpMIPSMOVWconst) 8771 v.AuxInt = int64(int32(uint32(c) * uint32(d))) 8772 return true 8773 } 8774 // match: (Select1 (DIV (MOVWconst [c]) (MOVWconst 
[d]))) 8775 // cond: 8776 // result: (MOVWconst [int64(int32(c)/int32(d))]) 8777 for { 8778 v_0 := v.Args[0] 8779 if v_0.Op != OpMIPSDIV { 8780 break 8781 } 8782 _ = v_0.Args[1] 8783 v_0_0 := v_0.Args[0] 8784 if v_0_0.Op != OpMIPSMOVWconst { 8785 break 8786 } 8787 c := v_0_0.AuxInt 8788 v_0_1 := v_0.Args[1] 8789 if v_0_1.Op != OpMIPSMOVWconst { 8790 break 8791 } 8792 d := v_0_1.AuxInt 8793 v.reset(OpMIPSMOVWconst) 8794 v.AuxInt = int64(int32(c) / int32(d)) 8795 return true 8796 } 8797 // match: (Select1 (DIVU (MOVWconst [c]) (MOVWconst [d]))) 8798 // cond: 8799 // result: (MOVWconst [int64(int32(uint32(c)/uint32(d)))]) 8800 for { 8801 v_0 := v.Args[0] 8802 if v_0.Op != OpMIPSDIVU { 8803 break 8804 } 8805 _ = v_0.Args[1] 8806 v_0_0 := v_0.Args[0] 8807 if v_0_0.Op != OpMIPSMOVWconst { 8808 break 8809 } 8810 c := v_0_0.AuxInt 8811 v_0_1 := v_0.Args[1] 8812 if v_0_1.Op != OpMIPSMOVWconst { 8813 break 8814 } 8815 d := v_0_1.AuxInt 8816 v.reset(OpMIPSMOVWconst) 8817 v.AuxInt = int64(int32(uint32(c) / uint32(d))) 8818 return true 8819 } 8820 return false 8821 } 8822 func rewriteValueMIPS_OpSignExt16to32_0(v *Value) bool { 8823 // match: (SignExt16to32 x) 8824 // cond: 8825 // result: (MOVHreg x) 8826 for { 8827 x := v.Args[0] 8828 v.reset(OpMIPSMOVHreg) 8829 v.AddArg(x) 8830 return true 8831 } 8832 } 8833 func rewriteValueMIPS_OpSignExt8to16_0(v *Value) bool { 8834 // match: (SignExt8to16 x) 8835 // cond: 8836 // result: (MOVBreg x) 8837 for { 8838 x := v.Args[0] 8839 v.reset(OpMIPSMOVBreg) 8840 v.AddArg(x) 8841 return true 8842 } 8843 } 8844 func rewriteValueMIPS_OpSignExt8to32_0(v *Value) bool { 8845 // match: (SignExt8to32 x) 8846 // cond: 8847 // result: (MOVBreg x) 8848 for { 8849 x := v.Args[0] 8850 v.reset(OpMIPSMOVBreg) 8851 v.AddArg(x) 8852 return true 8853 } 8854 } 8855 func rewriteValueMIPS_OpSignmask_0(v *Value) bool { 8856 // match: (Signmask x) 8857 // cond: 8858 // result: (SRAconst x [31]) 8859 for { 8860 x := v.Args[0] 8861 v.reset(OpMIPSSRAconst) 8862 
v.AuxInt = 31
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpSlicemask_0 lowers the generic Slicemask op to
// (SRAconst (NEG <t> x) [31]): negate, then arithmetic-shift the sign
// bit across the whole word.
func rewriteValueMIPS_OpSlicemask_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Slicemask <t> x)
	// cond:
	// result: (SRAconst (NEG <t> x) [31])
	for {
		t := v.Type
		x := v.Args[0]
		v.reset(OpMIPSSRAconst)
		v.AuxInt = 31
		v0 := b.NewValue0(v.Pos, OpMIPSNEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}

// rewriteValueMIPS_OpSqrt_0 lowers the generic Sqrt op to the MIPS
// SQRTD instruction.
func rewriteValueMIPS_OpSqrt_0(v *Value) bool {
	// match: (Sqrt x)
	// cond:
	// result: (SQRTD x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSSQRTD)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpStaticCall_0 lowers StaticCall to CALLstatic,
// carrying the argument width in AuxInt and the call target in Aux.
func rewriteValueMIPS_OpStaticCall_0(v *Value) bool {
	// match: (StaticCall [argwid] {target} mem)
	// cond:
	// result: (CALLstatic [argwid] {target} mem)
	for {
		argwid := v.AuxInt
		target := v.Aux
		mem := v.Args[0]
		v.reset(OpMIPSCALLstatic)
		v.AuxInt = argwid
		v.Aux = target
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS_OpStore_0 lowers generic Store ops to the
// size-appropriate MIPS store (MOVBstore/MOVHstore/...), selected by
// the stored type's size and, for 4/8 bytes, whether it is a float.
func rewriteValueMIPS_OpStore_0(v *Value) bool {
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 1) {
			break
		}
		v.reset(OpMIPSMOVBstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(t.(*types.Type).Size() == 2) {
			break
		}
		v.reset(OpMIPSMOVHstore)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.(*types.Type).Size() == 4 &&
!is32BitFloat(val.Type) 8949 // result: (MOVWstore ptr val mem) 8950 for { 8951 t := v.Aux 8952 _ = v.Args[2] 8953 ptr := v.Args[0] 8954 val := v.Args[1] 8955 mem := v.Args[2] 8956 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 8957 break 8958 } 8959 v.reset(OpMIPSMOVWstore) 8960 v.AddArg(ptr) 8961 v.AddArg(val) 8962 v.AddArg(mem) 8963 return true 8964 } 8965 // match: (Store {t} ptr val mem) 8966 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 8967 // result: (MOVFstore ptr val mem) 8968 for { 8969 t := v.Aux 8970 _ = v.Args[2] 8971 ptr := v.Args[0] 8972 val := v.Args[1] 8973 mem := v.Args[2] 8974 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 8975 break 8976 } 8977 v.reset(OpMIPSMOVFstore) 8978 v.AddArg(ptr) 8979 v.AddArg(val) 8980 v.AddArg(mem) 8981 return true 8982 } 8983 // match: (Store {t} ptr val mem) 8984 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 8985 // result: (MOVDstore ptr val mem) 8986 for { 8987 t := v.Aux 8988 _ = v.Args[2] 8989 ptr := v.Args[0] 8990 val := v.Args[1] 8991 mem := v.Args[2] 8992 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 8993 break 8994 } 8995 v.reset(OpMIPSMOVDstore) 8996 v.AddArg(ptr) 8997 v.AddArg(val) 8998 v.AddArg(mem) 8999 return true 9000 } 9001 return false 9002 } 9003 func rewriteValueMIPS_OpSub16_0(v *Value) bool { 9004 // match: (Sub16 x y) 9005 // cond: 9006 // result: (SUB x y) 9007 for { 9008 _ = v.Args[1] 9009 x := v.Args[0] 9010 y := v.Args[1] 9011 v.reset(OpMIPSSUB) 9012 v.AddArg(x) 9013 v.AddArg(y) 9014 return true 9015 } 9016 } 9017 func rewriteValueMIPS_OpSub32_0(v *Value) bool { 9018 // match: (Sub32 x y) 9019 // cond: 9020 // result: (SUB x y) 9021 for { 9022 _ = v.Args[1] 9023 x := v.Args[0] 9024 y := v.Args[1] 9025 v.reset(OpMIPSSUB) 9026 v.AddArg(x) 9027 v.AddArg(y) 9028 return true 9029 } 9030 } 9031 func rewriteValueMIPS_OpSub32F_0(v *Value) bool { 9032 // match: (Sub32F x y) 9033 // cond: 9034 // result: (SUBF x y) 9035 
for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUBF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpSub32withcarry_0 lowers Sub32withcarry to
// (SUB (SUB <t> x y) c): subtract y, then subtract the borrow bit.
func rewriteValueMIPS_OpSub32withcarry_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Sub32withcarry <t> x y c)
	// cond:
	// result: (SUB (SUB <t> x y) c)
	for {
		t := v.Type
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		c := v.Args[2]
		v.reset(OpMIPSSUB)
		v0 := b.NewValue0(v.Pos, OpMIPSSUB, t)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		v.AddArg(c)
		return true
	}
}

// rewriteValueMIPS_OpSub64F_0 lowers Sub64F to the MIPS SUBD
// (double-precision subtract) instruction.
func rewriteValueMIPS_OpSub64F_0(v *Value) bool {
	// match: (Sub64F x y)
	// cond:
	// result: (SUBD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUBD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpSub8_0 lowers Sub8 to the full-width MIPS SUB op.
func rewriteValueMIPS_OpSub8_0(v *Value) bool {
	// match: (Sub8 x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpSubPtr_0 lowers SubPtr to the MIPS SUB op.
func rewriteValueMIPS_OpSubPtr_0(v *Value) bool {
	// match: (SubPtr x y)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSSUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpTrunc16to8_0 replaces Trunc16to8 with a copy of
// its argument (no instruction is needed).
func rewriteValueMIPS_OpTrunc16to8_0(v *Value) bool {
	// match: (Trunc16to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpTrunc32to16_0 replaces Trunc32to16 with a copy of
// its argument.
func rewriteValueMIPS_OpTrunc32to16_0(v *Value) bool {
	// match: (Trunc32to16 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpTrunc32to8_0 replaces Trunc32to8 with a copy of
// its argument.
func rewriteValueMIPS_OpTrunc32to8_0(v *Value)
bool {
	// match: (Trunc32to8 x)
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpWB_0 lowers the generic write-barrier op WB to the
// MIPS LoweredWB, preserving the barrier function in Aux.
func rewriteValueMIPS_OpWB_0(v *Value) bool {
	// match: (WB {fn} destptr srcptr mem)
	// cond:
	// result: (LoweredWB {fn} destptr srcptr mem)
	for {
		fn := v.Aux
		_ = v.Args[2]
		destptr := v.Args[0]
		srcptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpMIPSLoweredWB)
		v.Aux = fn
		v.AddArg(destptr)
		v.AddArg(srcptr)
		v.AddArg(mem)
		return true
	}
}

// rewriteValueMIPS_OpXor16_0 lowers Xor16 to the MIPS XOR op.
func rewriteValueMIPS_OpXor16_0(v *Value) bool {
	// match: (Xor16 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpXor32_0 lowers Xor32 to the MIPS XOR op.
func rewriteValueMIPS_OpXor32_0(v *Value) bool {
	// match: (Xor32 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpXor8_0 lowers Xor8 to the MIPS XOR op.
func rewriteValueMIPS_OpXor8_0(v *Value) bool {
	// match: (Xor8 x y)
	// cond:
	// result: (XOR x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPSXOR)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS_OpZero_0 lowers small Zero ops (zeroing AuxInt bytes
// at ptr) into chains of constant stores, chosen by size and, where a
// rule requires it, by the type's alignment.
func rewriteValueMIPS_OpZero_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zero [0] _ mem)
	// cond:
	// result: mem
	for {
		if v.AuxInt != 0 {
			break
		}
		_ = v.Args[1]
		mem := v.Args[1]
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Zero [1] ptr mem)
	// cond:
	// result: (MOVBstore ptr (MOVWconst [0]) mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		_ = v.Args[1]
9231 ptr := v.Args[0] 9232 mem := v.Args[1] 9233 v.reset(OpMIPSMOVBstore) 9234 v.AddArg(ptr) 9235 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9236 v0.AuxInt = 0 9237 v.AddArg(v0) 9238 v.AddArg(mem) 9239 return true 9240 } 9241 // match: (Zero [2] {t} ptr mem) 9242 // cond: t.(*types.Type).Alignment()%2 == 0 9243 // result: (MOVHstore ptr (MOVWconst [0]) mem) 9244 for { 9245 if v.AuxInt != 2 { 9246 break 9247 } 9248 t := v.Aux 9249 _ = v.Args[1] 9250 ptr := v.Args[0] 9251 mem := v.Args[1] 9252 if !(t.(*types.Type).Alignment()%2 == 0) { 9253 break 9254 } 9255 v.reset(OpMIPSMOVHstore) 9256 v.AddArg(ptr) 9257 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9258 v0.AuxInt = 0 9259 v.AddArg(v0) 9260 v.AddArg(mem) 9261 return true 9262 } 9263 // match: (Zero [2] ptr mem) 9264 // cond: 9265 // result: (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)) 9266 for { 9267 if v.AuxInt != 2 { 9268 break 9269 } 9270 _ = v.Args[1] 9271 ptr := v.Args[0] 9272 mem := v.Args[1] 9273 v.reset(OpMIPSMOVBstore) 9274 v.AuxInt = 1 9275 v.AddArg(ptr) 9276 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9277 v0.AuxInt = 0 9278 v.AddArg(v0) 9279 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9280 v1.AuxInt = 0 9281 v1.AddArg(ptr) 9282 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9283 v2.AuxInt = 0 9284 v1.AddArg(v2) 9285 v1.AddArg(mem) 9286 v.AddArg(v1) 9287 return true 9288 } 9289 // match: (Zero [4] {t} ptr mem) 9290 // cond: t.(*types.Type).Alignment()%4 == 0 9291 // result: (MOVWstore ptr (MOVWconst [0]) mem) 9292 for { 9293 if v.AuxInt != 4 { 9294 break 9295 } 9296 t := v.Aux 9297 _ = v.Args[1] 9298 ptr := v.Args[0] 9299 mem := v.Args[1] 9300 if !(t.(*types.Type).Alignment()%4 == 0) { 9301 break 9302 } 9303 v.reset(OpMIPSMOVWstore) 9304 v.AddArg(ptr) 9305 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9306 v0.AuxInt = 0 9307 v.AddArg(v0) 9308 v.AddArg(mem) 9309 return true 9310 } 9311 // match: (Zero [4] {t} ptr mem) 
9312 // cond: t.(*types.Type).Alignment()%2 == 0 9313 // result: (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem)) 9314 for { 9315 if v.AuxInt != 4 { 9316 break 9317 } 9318 t := v.Aux 9319 _ = v.Args[1] 9320 ptr := v.Args[0] 9321 mem := v.Args[1] 9322 if !(t.(*types.Type).Alignment()%2 == 0) { 9323 break 9324 } 9325 v.reset(OpMIPSMOVHstore) 9326 v.AuxInt = 2 9327 v.AddArg(ptr) 9328 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9329 v0.AuxInt = 0 9330 v.AddArg(v0) 9331 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 9332 v1.AuxInt = 0 9333 v1.AddArg(ptr) 9334 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9335 v2.AuxInt = 0 9336 v1.AddArg(v2) 9337 v1.AddArg(mem) 9338 v.AddArg(v1) 9339 return true 9340 } 9341 // match: (Zero [4] ptr mem) 9342 // cond: 9343 // result: (MOVBstore [3] ptr (MOVWconst [0]) (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem)))) 9344 for { 9345 if v.AuxInt != 4 { 9346 break 9347 } 9348 _ = v.Args[1] 9349 ptr := v.Args[0] 9350 mem := v.Args[1] 9351 v.reset(OpMIPSMOVBstore) 9352 v.AuxInt = 3 9353 v.AddArg(ptr) 9354 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9355 v0.AuxInt = 0 9356 v.AddArg(v0) 9357 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9358 v1.AuxInt = 2 9359 v1.AddArg(ptr) 9360 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9361 v2.AuxInt = 0 9362 v1.AddArg(v2) 9363 v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9364 v3.AuxInt = 1 9365 v3.AddArg(ptr) 9366 v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9367 v4.AuxInt = 0 9368 v3.AddArg(v4) 9369 v5 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9370 v5.AuxInt = 0 9371 v5.AddArg(ptr) 9372 v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9373 v6.AuxInt = 0 9374 v5.AddArg(v6) 9375 v5.AddArg(mem) 9376 v3.AddArg(v5) 9377 v1.AddArg(v3) 9378 v.AddArg(v1) 9379 return true 9380 } 9381 // match: (Zero [3] ptr mem) 9382 // cond: 9383 
// result: (MOVBstore [2] ptr (MOVWconst [0]) (MOVBstore [1] ptr (MOVWconst [0]) (MOVBstore [0] ptr (MOVWconst [0]) mem))) 9384 for { 9385 if v.AuxInt != 3 { 9386 break 9387 } 9388 _ = v.Args[1] 9389 ptr := v.Args[0] 9390 mem := v.Args[1] 9391 v.reset(OpMIPSMOVBstore) 9392 v.AuxInt = 2 9393 v.AddArg(ptr) 9394 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9395 v0.AuxInt = 0 9396 v.AddArg(v0) 9397 v1 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9398 v1.AuxInt = 1 9399 v1.AddArg(ptr) 9400 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9401 v2.AuxInt = 0 9402 v1.AddArg(v2) 9403 v3 := b.NewValue0(v.Pos, OpMIPSMOVBstore, types.TypeMem) 9404 v3.AuxInt = 0 9405 v3.AddArg(ptr) 9406 v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9407 v4.AuxInt = 0 9408 v3.AddArg(v4) 9409 v3.AddArg(mem) 9410 v1.AddArg(v3) 9411 v.AddArg(v1) 9412 return true 9413 } 9414 // match: (Zero [6] {t} ptr mem) 9415 // cond: t.(*types.Type).Alignment()%2 == 0 9416 // result: (MOVHstore [4] ptr (MOVWconst [0]) (MOVHstore [2] ptr (MOVWconst [0]) (MOVHstore [0] ptr (MOVWconst [0]) mem))) 9417 for { 9418 if v.AuxInt != 6 { 9419 break 9420 } 9421 t := v.Aux 9422 _ = v.Args[1] 9423 ptr := v.Args[0] 9424 mem := v.Args[1] 9425 if !(t.(*types.Type).Alignment()%2 == 0) { 9426 break 9427 } 9428 v.reset(OpMIPSMOVHstore) 9429 v.AuxInt = 4 9430 v.AddArg(ptr) 9431 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9432 v0.AuxInt = 0 9433 v.AddArg(v0) 9434 v1 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 9435 v1.AuxInt = 2 9436 v1.AddArg(ptr) 9437 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9438 v2.AuxInt = 0 9439 v1.AddArg(v2) 9440 v3 := b.NewValue0(v.Pos, OpMIPSMOVHstore, types.TypeMem) 9441 v3.AuxInt = 0 9442 v3.AddArg(ptr) 9443 v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9444 v4.AuxInt = 0 9445 v3.AddArg(v4) 9446 v3.AddArg(mem) 9447 v1.AddArg(v3) 9448 v.AddArg(v1) 9449 return true 9450 } 9451 // match: (Zero [8] {t} ptr mem) 9452 // cond: 
t.(*types.Type).Alignment()%4 == 0 9453 // result: (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)) 9454 for { 9455 if v.AuxInt != 8 { 9456 break 9457 } 9458 t := v.Aux 9459 _ = v.Args[1] 9460 ptr := v.Args[0] 9461 mem := v.Args[1] 9462 if !(t.(*types.Type).Alignment()%4 == 0) { 9463 break 9464 } 9465 v.reset(OpMIPSMOVWstore) 9466 v.AuxInt = 4 9467 v.AddArg(ptr) 9468 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9469 v0.AuxInt = 0 9470 v.AddArg(v0) 9471 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9472 v1.AuxInt = 0 9473 v1.AddArg(ptr) 9474 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9475 v2.AuxInt = 0 9476 v1.AddArg(v2) 9477 v1.AddArg(mem) 9478 v.AddArg(v1) 9479 return true 9480 } 9481 return false 9482 } 9483 func rewriteValueMIPS_OpZero_10(v *Value) bool { 9484 b := v.Block 9485 _ = b 9486 config := b.Func.Config 9487 _ = config 9488 typ := &b.Func.Config.Types 9489 _ = typ 9490 // match: (Zero [12] {t} ptr mem) 9491 // cond: t.(*types.Type).Alignment()%4 == 0 9492 // result: (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem))) 9493 for { 9494 if v.AuxInt != 12 { 9495 break 9496 } 9497 t := v.Aux 9498 _ = v.Args[1] 9499 ptr := v.Args[0] 9500 mem := v.Args[1] 9501 if !(t.(*types.Type).Alignment()%4 == 0) { 9502 break 9503 } 9504 v.reset(OpMIPSMOVWstore) 9505 v.AuxInt = 8 9506 v.AddArg(ptr) 9507 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9508 v0.AuxInt = 0 9509 v.AddArg(v0) 9510 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9511 v1.AuxInt = 4 9512 v1.AddArg(ptr) 9513 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9514 v2.AuxInt = 0 9515 v1.AddArg(v2) 9516 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9517 v3.AuxInt = 0 9518 v3.AddArg(ptr) 9519 v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9520 v4.AuxInt = 0 9521 v3.AddArg(v4) 9522 v3.AddArg(mem) 9523 v1.AddArg(v3) 9524 v.AddArg(v1) 9525 return true 9526 
} 9527 // match: (Zero [16] {t} ptr mem) 9528 // cond: t.(*types.Type).Alignment()%4 == 0 9529 // result: (MOVWstore [12] ptr (MOVWconst [0]) (MOVWstore [8] ptr (MOVWconst [0]) (MOVWstore [4] ptr (MOVWconst [0]) (MOVWstore [0] ptr (MOVWconst [0]) mem)))) 9530 for { 9531 if v.AuxInt != 16 { 9532 break 9533 } 9534 t := v.Aux 9535 _ = v.Args[1] 9536 ptr := v.Args[0] 9537 mem := v.Args[1] 9538 if !(t.(*types.Type).Alignment()%4 == 0) { 9539 break 9540 } 9541 v.reset(OpMIPSMOVWstore) 9542 v.AuxInt = 12 9543 v.AddArg(ptr) 9544 v0 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9545 v0.AuxInt = 0 9546 v.AddArg(v0) 9547 v1 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9548 v1.AuxInt = 8 9549 v1.AddArg(ptr) 9550 v2 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9551 v2.AuxInt = 0 9552 v1.AddArg(v2) 9553 v3 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9554 v3.AuxInt = 4 9555 v3.AddArg(ptr) 9556 v4 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9557 v4.AuxInt = 0 9558 v3.AddArg(v4) 9559 v5 := b.NewValue0(v.Pos, OpMIPSMOVWstore, types.TypeMem) 9560 v5.AuxInt = 0 9561 v5.AddArg(ptr) 9562 v6 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32) 9563 v6.AuxInt = 0 9564 v5.AddArg(v6) 9565 v5.AddArg(mem) 9566 v3.AddArg(v5) 9567 v1.AddArg(v3) 9568 v.AddArg(v1) 9569 return true 9570 } 9571 // match: (Zero [s] {t} ptr mem) 9572 // cond: (s > 16 || t.(*types.Type).Alignment()%4 != 0) 9573 // result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 9574 for { 9575 s := v.AuxInt 9576 t := v.Aux 9577 _ = v.Args[1] 9578 ptr := v.Args[0] 9579 mem := v.Args[1] 9580 if !(s > 16 || t.(*types.Type).Alignment()%4 != 0) { 9581 break 9582 } 9583 v.reset(OpMIPSLoweredZero) 9584 v.AuxInt = t.(*types.Type).Alignment() 9585 v.AddArg(ptr) 9586 v0 := b.NewValue0(v.Pos, OpMIPSADDconst, ptr.Type) 9587 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 9588 v0.AddArg(ptr) 9589 v.AddArg(v0) 
v.AddArg(mem)
		return true
	}
	return false
}

// rewriteValueMIPS_OpZeroExt16to32_0 lowers ZeroExt16to32 to the MIPS
// MOVHUreg (zero-extending halfword move) op.
func rewriteValueMIPS_OpZeroExt16to32_0(v *Value) bool {
	// match: (ZeroExt16to32 x)
	// cond:
	// result: (MOVHUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVHUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpZeroExt8to16_0 lowers ZeroExt8to16 to the MIPS
// MOVBUreg (zero-extending byte move) op.
func rewriteValueMIPS_OpZeroExt8to16_0(v *Value) bool {
	// match: (ZeroExt8to16 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpZeroExt8to32_0 lowers ZeroExt8to32 to the MIPS
// MOVBUreg op.
func rewriteValueMIPS_OpZeroExt8to32_0(v *Value) bool {
	// match: (ZeroExt8to32 x)
	// cond:
	// result: (MOVBUreg x)
	for {
		x := v.Args[0]
		v.reset(OpMIPSMOVBUreg)
		v.AddArg(x)
		return true
	}
}

// rewriteValueMIPS_OpZeromask_0 lowers Zeromask x to
// (NEG (SGTU x (MOVWconst [0]))), i.e. the negation of (x >u 0).
func rewriteValueMIPS_OpZeromask_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Zeromask x)
	// cond:
	// result: (NEG (SGTU x (MOVWconst [0])))
	for {
		x := v.Args[0]
		v.reset(OpMIPSNEG)
		v0 := b.NewValue0(v.Pos, OpMIPSSGTU, typ.Bool)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpMIPSMOVWconst, typ.UInt32)
		v1.AuxInt = 0
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}

// rewriteBlockMIPS rewrites the control values and kinds of MIPS
// blocks, simplifying comparisons and branch forms per the rules
// generated from gen/MIPS.rules.  It reports whether any rewrite fired.
func rewriteBlockMIPS(b *Block) bool {
	config := b.Func.Config
	_ = config
	fe := b.Func.fe
	_ = fe
	typ := &config.Types
	_ = typ
	switch b.Kind {
	case BlockMIPSEQ:
		// match: (EQ (FPFlagTrue cmp) yes no)
		// cond:
		// result: (FPF cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPSFPFlagTrue {
				break
			}
			cmp := v.Args[0]
			b.Kind = BlockMIPSFPF
			b.SetControl(cmp)
			b.Aux = nil
			return true
		}
		// match: (EQ (FPFlagFalse cmp) yes no)
		// cond:
		// result: (FPT cmp yes no)
		for {
			v := b.Control
			if v.Op != OpMIPSFPFlagFalse {
				break
			}
			cmp := v.Args[0]
			b.Kind =
BlockMIPSFPT 9681 b.SetControl(cmp) 9682 b.Aux = nil 9683 return true 9684 } 9685 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no) 9686 // cond: 9687 // result: (NE cmp yes no) 9688 for { 9689 v := b.Control 9690 if v.Op != OpMIPSXORconst { 9691 break 9692 } 9693 if v.AuxInt != 1 { 9694 break 9695 } 9696 cmp := v.Args[0] 9697 if cmp.Op != OpMIPSSGT { 9698 break 9699 } 9700 _ = cmp.Args[1] 9701 b.Kind = BlockMIPSNE 9702 b.SetControl(cmp) 9703 b.Aux = nil 9704 return true 9705 } 9706 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no) 9707 // cond: 9708 // result: (NE cmp yes no) 9709 for { 9710 v := b.Control 9711 if v.Op != OpMIPSXORconst { 9712 break 9713 } 9714 if v.AuxInt != 1 { 9715 break 9716 } 9717 cmp := v.Args[0] 9718 if cmp.Op != OpMIPSSGTU { 9719 break 9720 } 9721 _ = cmp.Args[1] 9722 b.Kind = BlockMIPSNE 9723 b.SetControl(cmp) 9724 b.Aux = nil 9725 return true 9726 } 9727 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no) 9728 // cond: 9729 // result: (NE cmp yes no) 9730 for { 9731 v := b.Control 9732 if v.Op != OpMIPSXORconst { 9733 break 9734 } 9735 if v.AuxInt != 1 { 9736 break 9737 } 9738 cmp := v.Args[0] 9739 if cmp.Op != OpMIPSSGTconst { 9740 break 9741 } 9742 b.Kind = BlockMIPSNE 9743 b.SetControl(cmp) 9744 b.Aux = nil 9745 return true 9746 } 9747 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no) 9748 // cond: 9749 // result: (NE cmp yes no) 9750 for { 9751 v := b.Control 9752 if v.Op != OpMIPSXORconst { 9753 break 9754 } 9755 if v.AuxInt != 1 { 9756 break 9757 } 9758 cmp := v.Args[0] 9759 if cmp.Op != OpMIPSSGTUconst { 9760 break 9761 } 9762 b.Kind = BlockMIPSNE 9763 b.SetControl(cmp) 9764 b.Aux = nil 9765 return true 9766 } 9767 // match: (EQ (XORconst [1] cmp:(SGTzero _)) yes no) 9768 // cond: 9769 // result: (NE cmp yes no) 9770 for { 9771 v := b.Control 9772 if v.Op != OpMIPSXORconst { 9773 break 9774 } 9775 if v.AuxInt != 1 { 9776 break 9777 } 9778 cmp := v.Args[0] 9779 if cmp.Op != OpMIPSSGTzero { 9780 break 9781 } 9782 b.Kind = 
BlockMIPSNE 9783 b.SetControl(cmp) 9784 b.Aux = nil 9785 return true 9786 } 9787 // match: (EQ (XORconst [1] cmp:(SGTUzero _)) yes no) 9788 // cond: 9789 // result: (NE cmp yes no) 9790 for { 9791 v := b.Control 9792 if v.Op != OpMIPSXORconst { 9793 break 9794 } 9795 if v.AuxInt != 1 { 9796 break 9797 } 9798 cmp := v.Args[0] 9799 if cmp.Op != OpMIPSSGTUzero { 9800 break 9801 } 9802 b.Kind = BlockMIPSNE 9803 b.SetControl(cmp) 9804 b.Aux = nil 9805 return true 9806 } 9807 // match: (EQ (SGTUconst [1] x) yes no) 9808 // cond: 9809 // result: (NE x yes no) 9810 for { 9811 v := b.Control 9812 if v.Op != OpMIPSSGTUconst { 9813 break 9814 } 9815 if v.AuxInt != 1 { 9816 break 9817 } 9818 x := v.Args[0] 9819 b.Kind = BlockMIPSNE 9820 b.SetControl(x) 9821 b.Aux = nil 9822 return true 9823 } 9824 // match: (EQ (SGTUzero x) yes no) 9825 // cond: 9826 // result: (EQ x yes no) 9827 for { 9828 v := b.Control 9829 if v.Op != OpMIPSSGTUzero { 9830 break 9831 } 9832 x := v.Args[0] 9833 b.Kind = BlockMIPSEQ 9834 b.SetControl(x) 9835 b.Aux = nil 9836 return true 9837 } 9838 // match: (EQ (SGTconst [0] x) yes no) 9839 // cond: 9840 // result: (GEZ x yes no) 9841 for { 9842 v := b.Control 9843 if v.Op != OpMIPSSGTconst { 9844 break 9845 } 9846 if v.AuxInt != 0 { 9847 break 9848 } 9849 x := v.Args[0] 9850 b.Kind = BlockMIPSGEZ 9851 b.SetControl(x) 9852 b.Aux = nil 9853 return true 9854 } 9855 // match: (EQ (SGTzero x) yes no) 9856 // cond: 9857 // result: (LEZ x yes no) 9858 for { 9859 v := b.Control 9860 if v.Op != OpMIPSSGTzero { 9861 break 9862 } 9863 x := v.Args[0] 9864 b.Kind = BlockMIPSLEZ 9865 b.SetControl(x) 9866 b.Aux = nil 9867 return true 9868 } 9869 // match: (EQ (MOVWconst [0]) yes no) 9870 // cond: 9871 // result: (First nil yes no) 9872 for { 9873 v := b.Control 9874 if v.Op != OpMIPSMOVWconst { 9875 break 9876 } 9877 if v.AuxInt != 0 { 9878 break 9879 } 9880 b.Kind = BlockFirst 9881 b.SetControl(nil) 9882 b.Aux = nil 9883 return true 9884 } 9885 // match: (EQ (MOVWconst 
[c]) yes no) 9886 // cond: c != 0 9887 // result: (First nil no yes) 9888 for { 9889 v := b.Control 9890 if v.Op != OpMIPSMOVWconst { 9891 break 9892 } 9893 c := v.AuxInt 9894 if !(c != 0) { 9895 break 9896 } 9897 b.Kind = BlockFirst 9898 b.SetControl(nil) 9899 b.Aux = nil 9900 b.swapSuccessors() 9901 return true 9902 } 9903 case BlockMIPSGEZ: 9904 // match: (GEZ (MOVWconst [c]) yes no) 9905 // cond: int32(c) >= 0 9906 // result: (First nil yes no) 9907 for { 9908 v := b.Control 9909 if v.Op != OpMIPSMOVWconst { 9910 break 9911 } 9912 c := v.AuxInt 9913 if !(int32(c) >= 0) { 9914 break 9915 } 9916 b.Kind = BlockFirst 9917 b.SetControl(nil) 9918 b.Aux = nil 9919 return true 9920 } 9921 // match: (GEZ (MOVWconst [c]) yes no) 9922 // cond: int32(c) < 0 9923 // result: (First nil no yes) 9924 for { 9925 v := b.Control 9926 if v.Op != OpMIPSMOVWconst { 9927 break 9928 } 9929 c := v.AuxInt 9930 if !(int32(c) < 0) { 9931 break 9932 } 9933 b.Kind = BlockFirst 9934 b.SetControl(nil) 9935 b.Aux = nil 9936 b.swapSuccessors() 9937 return true 9938 } 9939 case BlockMIPSGTZ: 9940 // match: (GTZ (MOVWconst [c]) yes no) 9941 // cond: int32(c) > 0 9942 // result: (First nil yes no) 9943 for { 9944 v := b.Control 9945 if v.Op != OpMIPSMOVWconst { 9946 break 9947 } 9948 c := v.AuxInt 9949 if !(int32(c) > 0) { 9950 break 9951 } 9952 b.Kind = BlockFirst 9953 b.SetControl(nil) 9954 b.Aux = nil 9955 return true 9956 } 9957 // match: (GTZ (MOVWconst [c]) yes no) 9958 // cond: int32(c) <= 0 9959 // result: (First nil no yes) 9960 for { 9961 v := b.Control 9962 if v.Op != OpMIPSMOVWconst { 9963 break 9964 } 9965 c := v.AuxInt 9966 if !(int32(c) <= 0) { 9967 break 9968 } 9969 b.Kind = BlockFirst 9970 b.SetControl(nil) 9971 b.Aux = nil 9972 b.swapSuccessors() 9973 return true 9974 } 9975 case BlockIf: 9976 // match: (If cond yes no) 9977 // cond: 9978 // result: (NE cond yes no) 9979 for { 9980 v := b.Control 9981 _ = v 9982 cond := b.Control 9983 b.Kind = BlockMIPSNE 9984 b.SetControl(cond) 
9985 b.Aux = nil 9986 return true 9987 } 9988 case BlockMIPSLEZ: 9989 // match: (LEZ (MOVWconst [c]) yes no) 9990 // cond: int32(c) <= 0 9991 // result: (First nil yes no) 9992 for { 9993 v := b.Control 9994 if v.Op != OpMIPSMOVWconst { 9995 break 9996 } 9997 c := v.AuxInt 9998 if !(int32(c) <= 0) { 9999 break 10000 } 10001 b.Kind = BlockFirst 10002 b.SetControl(nil) 10003 b.Aux = nil 10004 return true 10005 } 10006 // match: (LEZ (MOVWconst [c]) yes no) 10007 // cond: int32(c) > 0 10008 // result: (First nil no yes) 10009 for { 10010 v := b.Control 10011 if v.Op != OpMIPSMOVWconst { 10012 break 10013 } 10014 c := v.AuxInt 10015 if !(int32(c) > 0) { 10016 break 10017 } 10018 b.Kind = BlockFirst 10019 b.SetControl(nil) 10020 b.Aux = nil 10021 b.swapSuccessors() 10022 return true 10023 } 10024 case BlockMIPSLTZ: 10025 // match: (LTZ (MOVWconst [c]) yes no) 10026 // cond: int32(c) < 0 10027 // result: (First nil yes no) 10028 for { 10029 v := b.Control 10030 if v.Op != OpMIPSMOVWconst { 10031 break 10032 } 10033 c := v.AuxInt 10034 if !(int32(c) < 0) { 10035 break 10036 } 10037 b.Kind = BlockFirst 10038 b.SetControl(nil) 10039 b.Aux = nil 10040 return true 10041 } 10042 // match: (LTZ (MOVWconst [c]) yes no) 10043 // cond: int32(c) >= 0 10044 // result: (First nil no yes) 10045 for { 10046 v := b.Control 10047 if v.Op != OpMIPSMOVWconst { 10048 break 10049 } 10050 c := v.AuxInt 10051 if !(int32(c) >= 0) { 10052 break 10053 } 10054 b.Kind = BlockFirst 10055 b.SetControl(nil) 10056 b.Aux = nil 10057 b.swapSuccessors() 10058 return true 10059 } 10060 case BlockMIPSNE: 10061 // match: (NE (FPFlagTrue cmp) yes no) 10062 // cond: 10063 // result: (FPT cmp yes no) 10064 for { 10065 v := b.Control 10066 if v.Op != OpMIPSFPFlagTrue { 10067 break 10068 } 10069 cmp := v.Args[0] 10070 b.Kind = BlockMIPSFPT 10071 b.SetControl(cmp) 10072 b.Aux = nil 10073 return true 10074 } 10075 // match: (NE (FPFlagFalse cmp) yes no) 10076 // cond: 10077 // result: (FPF cmp yes no) 10078 for { 
10079 v := b.Control 10080 if v.Op != OpMIPSFPFlagFalse { 10081 break 10082 } 10083 cmp := v.Args[0] 10084 b.Kind = BlockMIPSFPF 10085 b.SetControl(cmp) 10086 b.Aux = nil 10087 return true 10088 } 10089 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no) 10090 // cond: 10091 // result: (EQ cmp yes no) 10092 for { 10093 v := b.Control 10094 if v.Op != OpMIPSXORconst { 10095 break 10096 } 10097 if v.AuxInt != 1 { 10098 break 10099 } 10100 cmp := v.Args[0] 10101 if cmp.Op != OpMIPSSGT { 10102 break 10103 } 10104 _ = cmp.Args[1] 10105 b.Kind = BlockMIPSEQ 10106 b.SetControl(cmp) 10107 b.Aux = nil 10108 return true 10109 } 10110 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no) 10111 // cond: 10112 // result: (EQ cmp yes no) 10113 for { 10114 v := b.Control 10115 if v.Op != OpMIPSXORconst { 10116 break 10117 } 10118 if v.AuxInt != 1 { 10119 break 10120 } 10121 cmp := v.Args[0] 10122 if cmp.Op != OpMIPSSGTU { 10123 break 10124 } 10125 _ = cmp.Args[1] 10126 b.Kind = BlockMIPSEQ 10127 b.SetControl(cmp) 10128 b.Aux = nil 10129 return true 10130 } 10131 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no) 10132 // cond: 10133 // result: (EQ cmp yes no) 10134 for { 10135 v := b.Control 10136 if v.Op != OpMIPSXORconst { 10137 break 10138 } 10139 if v.AuxInt != 1 { 10140 break 10141 } 10142 cmp := v.Args[0] 10143 if cmp.Op != OpMIPSSGTconst { 10144 break 10145 } 10146 b.Kind = BlockMIPSEQ 10147 b.SetControl(cmp) 10148 b.Aux = nil 10149 return true 10150 } 10151 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no) 10152 // cond: 10153 // result: (EQ cmp yes no) 10154 for { 10155 v := b.Control 10156 if v.Op != OpMIPSXORconst { 10157 break 10158 } 10159 if v.AuxInt != 1 { 10160 break 10161 } 10162 cmp := v.Args[0] 10163 if cmp.Op != OpMIPSSGTUconst { 10164 break 10165 } 10166 b.Kind = BlockMIPSEQ 10167 b.SetControl(cmp) 10168 b.Aux = nil 10169 return true 10170 } 10171 // match: (NE (XORconst [1] cmp:(SGTzero _)) yes no) 10172 // cond: 10173 // result: (EQ cmp yes no) 10174 
for { 10175 v := b.Control 10176 if v.Op != OpMIPSXORconst { 10177 break 10178 } 10179 if v.AuxInt != 1 { 10180 break 10181 } 10182 cmp := v.Args[0] 10183 if cmp.Op != OpMIPSSGTzero { 10184 break 10185 } 10186 b.Kind = BlockMIPSEQ 10187 b.SetControl(cmp) 10188 b.Aux = nil 10189 return true 10190 } 10191 // match: (NE (XORconst [1] cmp:(SGTUzero _)) yes no) 10192 // cond: 10193 // result: (EQ cmp yes no) 10194 for { 10195 v := b.Control 10196 if v.Op != OpMIPSXORconst { 10197 break 10198 } 10199 if v.AuxInt != 1 { 10200 break 10201 } 10202 cmp := v.Args[0] 10203 if cmp.Op != OpMIPSSGTUzero { 10204 break 10205 } 10206 b.Kind = BlockMIPSEQ 10207 b.SetControl(cmp) 10208 b.Aux = nil 10209 return true 10210 } 10211 // match: (NE (SGTUconst [1] x) yes no) 10212 // cond: 10213 // result: (EQ x yes no) 10214 for { 10215 v := b.Control 10216 if v.Op != OpMIPSSGTUconst { 10217 break 10218 } 10219 if v.AuxInt != 1 { 10220 break 10221 } 10222 x := v.Args[0] 10223 b.Kind = BlockMIPSEQ 10224 b.SetControl(x) 10225 b.Aux = nil 10226 return true 10227 } 10228 // match: (NE (SGTUzero x) yes no) 10229 // cond: 10230 // result: (NE x yes no) 10231 for { 10232 v := b.Control 10233 if v.Op != OpMIPSSGTUzero { 10234 break 10235 } 10236 x := v.Args[0] 10237 b.Kind = BlockMIPSNE 10238 b.SetControl(x) 10239 b.Aux = nil 10240 return true 10241 } 10242 // match: (NE (SGTconst [0] x) yes no) 10243 // cond: 10244 // result: (LTZ x yes no) 10245 for { 10246 v := b.Control 10247 if v.Op != OpMIPSSGTconst { 10248 break 10249 } 10250 if v.AuxInt != 0 { 10251 break 10252 } 10253 x := v.Args[0] 10254 b.Kind = BlockMIPSLTZ 10255 b.SetControl(x) 10256 b.Aux = nil 10257 return true 10258 } 10259 // match: (NE (SGTzero x) yes no) 10260 // cond: 10261 // result: (GTZ x yes no) 10262 for { 10263 v := b.Control 10264 if v.Op != OpMIPSSGTzero { 10265 break 10266 } 10267 x := v.Args[0] 10268 b.Kind = BlockMIPSGTZ 10269 b.SetControl(x) 10270 b.Aux = nil 10271 return true 10272 } 10273 // match: (NE (MOVWconst 
[0]) yes no) 10274 // cond: 10275 // result: (First nil no yes) 10276 for { 10277 v := b.Control 10278 if v.Op != OpMIPSMOVWconst { 10279 break 10280 } 10281 if v.AuxInt != 0 { 10282 break 10283 } 10284 b.Kind = BlockFirst 10285 b.SetControl(nil) 10286 b.Aux = nil 10287 b.swapSuccessors() 10288 return true 10289 } 10290 // match: (NE (MOVWconst [c]) yes no) 10291 // cond: c != 0 10292 // result: (First nil yes no) 10293 for { 10294 v := b.Control 10295 if v.Op != OpMIPSMOVWconst { 10296 break 10297 } 10298 c := v.AuxInt 10299 if !(c != 0) { 10300 break 10301 } 10302 b.Kind = BlockFirst 10303 b.SetControl(nil) 10304 b.Aux = nil 10305 return true 10306 } 10307 } 10308 return false 10309 }