// Code generated from gen/MIPS64.rules; DO NOT EDIT.
// generated with: cd gen; go run *.go

package ssa

import "math"
import "cmd/internal/obj"
import "cmd/internal/objabi"
import "cmd/compile/internal/types"

var _ = math.MinInt8  // in case not otherwise used
var _ = obj.ANOP      // in case not otherwise used
var _ = objabi.GOROOT // in case not otherwise used
var _ = types.TypeMem // in case not otherwise used

// rewriteValueMIPS64 dispatches the generic SSA value v to the MIPS64-specific
// rewrite helper for v.Op and reports whether any rewrite rule fired.
//
// NOTE(review): this file is machine-generated from gen/MIPS64.rules (see the
// header above); any change must be made in the rules file and the code
// regenerated — do not edit this file by hand.
func rewriteValueMIPS64(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValueMIPS64_OpAdd16_0(v)
	case OpAdd32:
		return rewriteValueMIPS64_OpAdd32_0(v)
	case OpAdd32F:
		return rewriteValueMIPS64_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValueMIPS64_OpAdd64_0(v)
	case OpAdd64F:
		return rewriteValueMIPS64_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValueMIPS64_OpAdd8_0(v)
	case OpAddPtr:
		return rewriteValueMIPS64_OpAddPtr_0(v)
	case OpAddr:
		return rewriteValueMIPS64_OpAddr_0(v)
	case OpAnd16:
		return rewriteValueMIPS64_OpAnd16_0(v)
	case OpAnd32:
		return rewriteValueMIPS64_OpAnd32_0(v)
	case OpAnd64:
		return rewriteValueMIPS64_OpAnd64_0(v)
	case OpAnd8:
		return rewriteValueMIPS64_OpAnd8_0(v)
	case OpAndB:
		return rewriteValueMIPS64_OpAndB_0(v)
	case OpAvg64u:
		return rewriteValueMIPS64_OpAvg64u_0(v)
	case OpClosureCall:
		return rewriteValueMIPS64_OpClosureCall_0(v)
	case OpCom16:
		return rewriteValueMIPS64_OpCom16_0(v)
	case OpCom32:
		return rewriteValueMIPS64_OpCom32_0(v)
	case OpCom64:
		return rewriteValueMIPS64_OpCom64_0(v)
	case OpCom8:
		return rewriteValueMIPS64_OpCom8_0(v)
	case OpConst16:
		return rewriteValueMIPS64_OpConst16_0(v)
	case OpConst32:
		return rewriteValueMIPS64_OpConst32_0(v)
	case OpConst32F:
		return rewriteValueMIPS64_OpConst32F_0(v)
	case OpConst64:
		return rewriteValueMIPS64_OpConst64_0(v)
	case OpConst64F:
		return rewriteValueMIPS64_OpConst64F_0(v)
	case OpConst8:
		return rewriteValueMIPS64_OpConst8_0(v)
	case OpConstBool:
		return rewriteValueMIPS64_OpConstBool_0(v)
	case OpConstNil:
		return rewriteValueMIPS64_OpConstNil_0(v)
	case OpConvert:
		return rewriteValueMIPS64_OpConvert_0(v)
	case OpCvt32Fto32:
		return rewriteValueMIPS64_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValueMIPS64_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValueMIPS64_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValueMIPS64_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValueMIPS64_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValueMIPS64_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValueMIPS64_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValueMIPS64_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValueMIPS64_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValueMIPS64_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValueMIPS64_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValueMIPS64_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValueMIPS64_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValueMIPS64_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValueMIPS64_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValueMIPS64_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValueMIPS64_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValueMIPS64_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValueMIPS64_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValueMIPS64_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValueMIPS64_OpEq16_0(v)
	case OpEq32:
		return rewriteValueMIPS64_OpEq32_0(v)
	case OpEq32F:
		return rewriteValueMIPS64_OpEq32F_0(v)
	case OpEq64:
		return rewriteValueMIPS64_OpEq64_0(v)
	case OpEq64F:
		return rewriteValueMIPS64_OpEq64F_0(v)
	case OpEq8:
		return rewriteValueMIPS64_OpEq8_0(v)
	case OpEqB:
		return rewriteValueMIPS64_OpEqB_0(v)
	case OpEqPtr:
		return rewriteValueMIPS64_OpEqPtr_0(v)
	case OpGeq16:
		return rewriteValueMIPS64_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValueMIPS64_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValueMIPS64_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValueMIPS64_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValueMIPS64_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValueMIPS64_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValueMIPS64_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValueMIPS64_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValueMIPS64_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValueMIPS64_OpGeq8U_0(v)
	case OpGetClosurePtr:
		return rewriteValueMIPS64_OpGetClosurePtr_0(v)
	case OpGreater16:
		return rewriteValueMIPS64_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValueMIPS64_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValueMIPS64_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValueMIPS64_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValueMIPS64_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValueMIPS64_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValueMIPS64_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValueMIPS64_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValueMIPS64_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValueMIPS64_OpGreater8U_0(v)
	case OpHmul32:
		return rewriteValueMIPS64_OpHmul32_0(v)
	case OpHmul32u:
		return rewriteValueMIPS64_OpHmul32u_0(v)
	case OpHmul64:
		return rewriteValueMIPS64_OpHmul64_0(v)
	case OpHmul64u:
		return rewriteValueMIPS64_OpHmul64u_0(v)
	case OpInterCall:
		return rewriteValueMIPS64_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValueMIPS64_OpIsInBounds_0(v)
	case OpIsNonNil:
		return rewriteValueMIPS64_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValueMIPS64_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValueMIPS64_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValueMIPS64_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValueMIPS64_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValueMIPS64_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValueMIPS64_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValueMIPS64_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValueMIPS64_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValueMIPS64_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValueMIPS64_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValueMIPS64_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValueMIPS64_OpLess16_0(v)
	case OpLess16U:
		return rewriteValueMIPS64_OpLess16U_0(v)
	case OpLess32:
		return rewriteValueMIPS64_OpLess32_0(v)
	case OpLess32F:
		return rewriteValueMIPS64_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValueMIPS64_OpLess32U_0(v)
	case OpLess64:
		return rewriteValueMIPS64_OpLess64_0(v)
	case OpLess64F:
		return rewriteValueMIPS64_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValueMIPS64_OpLess64U_0(v)
	case OpLess8:
		return rewriteValueMIPS64_OpLess8_0(v)
	case OpLess8U:
		return rewriteValueMIPS64_OpLess8U_0(v)
	case OpLoad:
		return rewriteValueMIPS64_OpLoad_0(v)
	case OpLsh16x16:
		return rewriteValueMIPS64_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValueMIPS64_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValueMIPS64_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValueMIPS64_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValueMIPS64_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValueMIPS64_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValueMIPS64_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValueMIPS64_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValueMIPS64_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValueMIPS64_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValueMIPS64_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValueMIPS64_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValueMIPS64_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValueMIPS64_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValueMIPS64_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValueMIPS64_OpLsh8x8_0(v)
	case OpMIPS64ADDV:
		return rewriteValueMIPS64_OpMIPS64ADDV_0(v)
	case OpMIPS64ADDVconst:
		return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v)
	case OpMIPS64AND:
		return rewriteValueMIPS64_OpMIPS64AND_0(v)
	case OpMIPS64ANDconst:
		return rewriteValueMIPS64_OpMIPS64ANDconst_0(v)
	case OpMIPS64MOVBUload:
		return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v)
	case OpMIPS64MOVBUreg:
		return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v)
	case OpMIPS64MOVBload:
		return rewriteValueMIPS64_OpMIPS64MOVBload_0(v)
	case OpMIPS64MOVBreg:
		return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v)
	case OpMIPS64MOVBstore:
		return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v)
	case OpMIPS64MOVBstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v)
	case OpMIPS64MOVDload:
		return rewriteValueMIPS64_OpMIPS64MOVDload_0(v)
	case OpMIPS64MOVDstore:
		return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v)
	case OpMIPS64MOVFload:
		return rewriteValueMIPS64_OpMIPS64MOVFload_0(v)
	case OpMIPS64MOVFstore:
		return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v)
	case OpMIPS64MOVHUload:
		return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v)
	case OpMIPS64MOVHUreg:
		return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v)
	case OpMIPS64MOVHload:
		return rewriteValueMIPS64_OpMIPS64MOVHload_0(v)
	case OpMIPS64MOVHreg:
		return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v)
	case OpMIPS64MOVHstore:
		return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v)
	case OpMIPS64MOVHstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v)
	case OpMIPS64MOVVload:
		return rewriteValueMIPS64_OpMIPS64MOVVload_0(v)
	case OpMIPS64MOVVreg:
		return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v)
	case OpMIPS64MOVVstore:
		return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v)
	case OpMIPS64MOVVstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v)
	case OpMIPS64MOVWUload:
		return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v)
	case OpMIPS64MOVWUreg:
		return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v)
	case OpMIPS64MOVWload:
		return rewriteValueMIPS64_OpMIPS64MOVWload_0(v)
	case OpMIPS64MOVWreg:
		return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v)
	case OpMIPS64MOVWstore:
		return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v)
	case OpMIPS64MOVWstorezero:
		return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v)
	case OpMIPS64NEGV:
		return rewriteValueMIPS64_OpMIPS64NEGV_0(v)
	case OpMIPS64NOR:
		return rewriteValueMIPS64_OpMIPS64NOR_0(v)
	case OpMIPS64NORconst:
		return rewriteValueMIPS64_OpMIPS64NORconst_0(v)
	case OpMIPS64OR:
		return rewriteValueMIPS64_OpMIPS64OR_0(v)
	case OpMIPS64ORconst:
		return rewriteValueMIPS64_OpMIPS64ORconst_0(v)
	case OpMIPS64SGT:
		return rewriteValueMIPS64_OpMIPS64SGT_0(v)
	case OpMIPS64SGTU:
		return rewriteValueMIPS64_OpMIPS64SGTU_0(v)
	case OpMIPS64SGTUconst:
		return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v)
	case OpMIPS64SGTconst:
		return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v)
	case OpMIPS64SLLV:
		return rewriteValueMIPS64_OpMIPS64SLLV_0(v)
	case OpMIPS64SLLVconst:
		return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v)
	case OpMIPS64SRAV:
		return rewriteValueMIPS64_OpMIPS64SRAV_0(v)
	case OpMIPS64SRAVconst:
		return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v)
	case OpMIPS64SRLV:
		return rewriteValueMIPS64_OpMIPS64SRLV_0(v)
	case OpMIPS64SRLVconst:
		return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v)
	case OpMIPS64SUBV:
		return rewriteValueMIPS64_OpMIPS64SUBV_0(v)
	case OpMIPS64SUBVconst:
		return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v)
	case OpMIPS64XOR:
		return rewriteValueMIPS64_OpMIPS64XOR_0(v)
	case OpMIPS64XORconst:
		return rewriteValueMIPS64_OpMIPS64XORconst_0(v)
	case OpMod16:
		return rewriteValueMIPS64_OpMod16_0(v)
	case OpMod16u:
		return rewriteValueMIPS64_OpMod16u_0(v)
	case OpMod32:
		return rewriteValueMIPS64_OpMod32_0(v)
	case OpMod32u:
		return rewriteValueMIPS64_OpMod32u_0(v)
	case OpMod64:
		return rewriteValueMIPS64_OpMod64_0(v)
	case OpMod64u:
		return rewriteValueMIPS64_OpMod64u_0(v)
	case OpMod8:
		return rewriteValueMIPS64_OpMod8_0(v)
	case OpMod8u:
		return rewriteValueMIPS64_OpMod8u_0(v)
	case OpMove:
		return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v)
	case OpMul16:
		return rewriteValueMIPS64_OpMul16_0(v)
	case OpMul32:
		return rewriteValueMIPS64_OpMul32_0(v)
	case OpMul32F:
		return rewriteValueMIPS64_OpMul32F_0(v)
	case OpMul64:
		return rewriteValueMIPS64_OpMul64_0(v)
	case OpMul64F:
		return rewriteValueMIPS64_OpMul64F_0(v)
	case OpMul8:
		return rewriteValueMIPS64_OpMul8_0(v)
	case OpNeg16:
		return rewriteValueMIPS64_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValueMIPS64_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValueMIPS64_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValueMIPS64_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValueMIPS64_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValueMIPS64_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValueMIPS64_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValueMIPS64_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValueMIPS64_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValueMIPS64_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValueMIPS64_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValueMIPS64_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValueMIPS64_OpNeqB_0(v)
	case OpNeqPtr:
		return rewriteValueMIPS64_OpNeqPtr_0(v)
	case OpNilCheck:
		return rewriteValueMIPS64_OpNilCheck_0(v)
	case OpNot:
		return rewriteValueMIPS64_OpNot_0(v)
	case OpOffPtr:
		return rewriteValueMIPS64_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValueMIPS64_OpOr16_0(v)
	case OpOr32:
		return rewriteValueMIPS64_OpOr32_0(v)
	case OpOr64:
		return rewriteValueMIPS64_OpOr64_0(v)
	case OpOr8:
		return rewriteValueMIPS64_OpOr8_0(v)
	case OpOrB:
		return rewriteValueMIPS64_OpOrB_0(v)
	case OpRound32F:
		return rewriteValueMIPS64_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValueMIPS64_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValueMIPS64_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValueMIPS64_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValueMIPS64_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValueMIPS64_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValueMIPS64_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValueMIPS64_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValueMIPS64_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValueMIPS64_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValueMIPS64_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValueMIPS64_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValueMIPS64_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValueMIPS64_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValueMIPS64_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValueMIPS64_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValueMIPS64_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValueMIPS64_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValueMIPS64_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValueMIPS64_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValueMIPS64_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValueMIPS64_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValueMIPS64_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValueMIPS64_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValueMIPS64_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValueMIPS64_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValueMIPS64_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValueMIPS64_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValueMIPS64_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValueMIPS64_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValueMIPS64_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValueMIPS64_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValueMIPS64_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValueMIPS64_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValueMIPS64_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v)
	case OpSignExt16to32:
		return rewriteValueMIPS64_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValueMIPS64_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValueMIPS64_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValueMIPS64_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValueMIPS64_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValueMIPS64_OpSignExt8to64_0(v)
	case OpSlicemask:
		return rewriteValueMIPS64_OpSlicemask_0(v)
	case OpStaticCall:
		return rewriteValueMIPS64_OpStaticCall_0(v)
	case OpStore:
		return rewriteValueMIPS64_OpStore_0(v)
	case OpSub16:
		return rewriteValueMIPS64_OpSub16_0(v)
	case OpSub32:
		return rewriteValueMIPS64_OpSub32_0(v)
	case OpSub32F:
		return rewriteValueMIPS64_OpSub32F_0(v)
	case OpSub64:
		return rewriteValueMIPS64_OpSub64_0(v)
	case OpSub64F:
		return rewriteValueMIPS64_OpSub64F_0(v)
	case OpSub8:
		return rewriteValueMIPS64_OpSub8_0(v)
	case OpSubPtr:
		return rewriteValueMIPS64_OpSubPtr_0(v)
	case OpTrunc16to8:
		return rewriteValueMIPS64_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValueMIPS64_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValueMIPS64_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValueMIPS64_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValueMIPS64_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValueMIPS64_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValueMIPS64_OpXor16_0(v)
	case OpXor32:
		return rewriteValueMIPS64_OpXor32_0(v)
	case OpXor64:
		return rewriteValueMIPS64_OpXor64_0(v)
	case OpXor8:
		return rewriteValueMIPS64_OpXor8_0(v)
	case OpZero:
		return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v)
	case OpZeroExt16to32:
		return rewriteValueMIPS64_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValueMIPS64_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValueMIPS64_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValueMIPS64_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValueMIPS64_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValueMIPS64_OpZeroExt8to64_0(v)
	}
	return false
}

// rewriteValueMIPS64_OpAdd16_0 lowers the generic rule (Add16 x y) -> (ADDV x y).
// The `for` loop is the generator's match-attempt scaffold; every path returns,
// so it executes at most once.
func rewriteValueMIPS64_OpAdd16_0(v *Value) bool {
	// match: (Add16 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1] // single bounds hint covering both Args accesses below
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAdd32_0 lowers (Add32 x y) -> (ADDV x y).
func rewriteValueMIPS64_OpAdd32_0(v *Value) bool {
	// match: (Add32 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAdd32F_0 lowers (Add32F x y) -> (ADDF x y).
func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool {
	// match: (Add32F x y)
	// cond:
	// result: (ADDF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAdd64_0 lowers (Add64 x y) -> (ADDV x y).
func rewriteValueMIPS64_OpAdd64_0(v *Value) bool {
	// match: (Add64 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAdd64F_0 lowers (Add64F x y) -> (ADDD x y).
func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool {
	// match: (Add64F x y)
	// cond:
	// result: (ADDD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAdd8_0 lowers (Add8 x y) -> (ADDV x y).
func rewriteValueMIPS64_OpAdd8_0(v *Value) bool {
	// match: (Add8 x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAddPtr_0 lowers (AddPtr x y) -> (ADDV x y).
func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool {
	// match: (AddPtr x y)
	// cond:
	// result: (ADDV x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64ADDV)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}

// rewriteValueMIPS64_OpAddr_0 lowers (Addr {sym} base) -> (MOVVaddr {sym} base),
// carrying the symbol through v.Aux.
func rewriteValueMIPS64_OpAddr_0(v *Value) bool {
	// match: (Addr {sym} base)
	// cond:
	// result: (MOVVaddr {sym} base)
	for {
		sym := v.Aux
		base := v.Args[0]
		v.reset(OpMIPS64MOVVaddr)
		v.Aux = sym
		v.AddArg(base)
		return true
	}
}

// rewriteValueMIPS64_OpAnd16_0 lowers (And16 x y) -> (AND x y).
func rewriteValueMIPS64_OpAnd16_0(v *Value) bool {
	// match: (And16 x y)
	// cond:
	// result: (AND x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64AND)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
700 func rewriteValueMIPS64_OpAnd32_0(v *Value) bool { 701 // match: (And32 x y) 702 // cond: 703 // result: (AND x y) 704 for { 705 _ = v.Args[1] 706 x := v.Args[0] 707 y := v.Args[1] 708 v.reset(OpMIPS64AND) 709 v.AddArg(x) 710 v.AddArg(y) 711 return true 712 } 713 } 714 func rewriteValueMIPS64_OpAnd64_0(v *Value) bool { 715 // match: (And64 x y) 716 // cond: 717 // result: (AND x y) 718 for { 719 _ = v.Args[1] 720 x := v.Args[0] 721 y := v.Args[1] 722 v.reset(OpMIPS64AND) 723 v.AddArg(x) 724 v.AddArg(y) 725 return true 726 } 727 } 728 func rewriteValueMIPS64_OpAnd8_0(v *Value) bool { 729 // match: (And8 x y) 730 // cond: 731 // result: (AND x y) 732 for { 733 _ = v.Args[1] 734 x := v.Args[0] 735 y := v.Args[1] 736 v.reset(OpMIPS64AND) 737 v.AddArg(x) 738 v.AddArg(y) 739 return true 740 } 741 } 742 func rewriteValueMIPS64_OpAndB_0(v *Value) bool { 743 // match: (AndB x y) 744 // cond: 745 // result: (AND x y) 746 for { 747 _ = v.Args[1] 748 x := v.Args[0] 749 y := v.Args[1] 750 v.reset(OpMIPS64AND) 751 v.AddArg(x) 752 v.AddArg(y) 753 return true 754 } 755 } 756 func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool { 757 b := v.Block 758 _ = b 759 // match: (Avg64u <t> x y) 760 // cond: 761 // result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y) 762 for { 763 t := v.Type 764 _ = v.Args[1] 765 x := v.Args[0] 766 y := v.Args[1] 767 v.reset(OpMIPS64ADDV) 768 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t) 769 v0.AuxInt = 1 770 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t) 771 v1.AddArg(x) 772 v1.AddArg(y) 773 v0.AddArg(v1) 774 v.AddArg(v0) 775 v.AddArg(y) 776 return true 777 } 778 } 779 func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool { 780 // match: (ClosureCall [argwid] entry closure mem) 781 // cond: 782 // result: (CALLclosure [argwid] entry closure mem) 783 for { 784 argwid := v.AuxInt 785 _ = v.Args[2] 786 entry := v.Args[0] 787 closure := v.Args[1] 788 mem := v.Args[2] 789 v.reset(OpMIPS64CALLclosure) 790 v.AuxInt = argwid 791 v.AddArg(entry) 792 v.AddArg(closure) 
793 v.AddArg(mem) 794 return true 795 } 796 } 797 func rewriteValueMIPS64_OpCom16_0(v *Value) bool { 798 b := v.Block 799 _ = b 800 typ := &b.Func.Config.Types 801 _ = typ 802 // match: (Com16 x) 803 // cond: 804 // result: (NOR (MOVVconst [0]) x) 805 for { 806 x := v.Args[0] 807 v.reset(OpMIPS64NOR) 808 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 809 v0.AuxInt = 0 810 v.AddArg(v0) 811 v.AddArg(x) 812 return true 813 } 814 } 815 func rewriteValueMIPS64_OpCom32_0(v *Value) bool { 816 b := v.Block 817 _ = b 818 typ := &b.Func.Config.Types 819 _ = typ 820 // match: (Com32 x) 821 // cond: 822 // result: (NOR (MOVVconst [0]) x) 823 for { 824 x := v.Args[0] 825 v.reset(OpMIPS64NOR) 826 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 827 v0.AuxInt = 0 828 v.AddArg(v0) 829 v.AddArg(x) 830 return true 831 } 832 } 833 func rewriteValueMIPS64_OpCom64_0(v *Value) bool { 834 b := v.Block 835 _ = b 836 typ := &b.Func.Config.Types 837 _ = typ 838 // match: (Com64 x) 839 // cond: 840 // result: (NOR (MOVVconst [0]) x) 841 for { 842 x := v.Args[0] 843 v.reset(OpMIPS64NOR) 844 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 845 v0.AuxInt = 0 846 v.AddArg(v0) 847 v.AddArg(x) 848 return true 849 } 850 } 851 func rewriteValueMIPS64_OpCom8_0(v *Value) bool { 852 b := v.Block 853 _ = b 854 typ := &b.Func.Config.Types 855 _ = typ 856 // match: (Com8 x) 857 // cond: 858 // result: (NOR (MOVVconst [0]) x) 859 for { 860 x := v.Args[0] 861 v.reset(OpMIPS64NOR) 862 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 863 v0.AuxInt = 0 864 v.AddArg(v0) 865 v.AddArg(x) 866 return true 867 } 868 } 869 func rewriteValueMIPS64_OpConst16_0(v *Value) bool { 870 // match: (Const16 [val]) 871 // cond: 872 // result: (MOVVconst [val]) 873 for { 874 val := v.AuxInt 875 v.reset(OpMIPS64MOVVconst) 876 v.AuxInt = val 877 return true 878 } 879 } 880 func rewriteValueMIPS64_OpConst32_0(v *Value) bool { 881 // match: (Const32 [val]) 882 // cond: 883 // result: (MOVVconst [val]) 
884 for { 885 val := v.AuxInt 886 v.reset(OpMIPS64MOVVconst) 887 v.AuxInt = val 888 return true 889 } 890 } 891 func rewriteValueMIPS64_OpConst32F_0(v *Value) bool { 892 // match: (Const32F [val]) 893 // cond: 894 // result: (MOVFconst [val]) 895 for { 896 val := v.AuxInt 897 v.reset(OpMIPS64MOVFconst) 898 v.AuxInt = val 899 return true 900 } 901 } 902 func rewriteValueMIPS64_OpConst64_0(v *Value) bool { 903 // match: (Const64 [val]) 904 // cond: 905 // result: (MOVVconst [val]) 906 for { 907 val := v.AuxInt 908 v.reset(OpMIPS64MOVVconst) 909 v.AuxInt = val 910 return true 911 } 912 } 913 func rewriteValueMIPS64_OpConst64F_0(v *Value) bool { 914 // match: (Const64F [val]) 915 // cond: 916 // result: (MOVDconst [val]) 917 for { 918 val := v.AuxInt 919 v.reset(OpMIPS64MOVDconst) 920 v.AuxInt = val 921 return true 922 } 923 } 924 func rewriteValueMIPS64_OpConst8_0(v *Value) bool { 925 // match: (Const8 [val]) 926 // cond: 927 // result: (MOVVconst [val]) 928 for { 929 val := v.AuxInt 930 v.reset(OpMIPS64MOVVconst) 931 v.AuxInt = val 932 return true 933 } 934 } 935 func rewriteValueMIPS64_OpConstBool_0(v *Value) bool { 936 // match: (ConstBool [b]) 937 // cond: 938 // result: (MOVVconst [b]) 939 for { 940 b := v.AuxInt 941 v.reset(OpMIPS64MOVVconst) 942 v.AuxInt = b 943 return true 944 } 945 } 946 func rewriteValueMIPS64_OpConstNil_0(v *Value) bool { 947 // match: (ConstNil) 948 // cond: 949 // result: (MOVVconst [0]) 950 for { 951 v.reset(OpMIPS64MOVVconst) 952 v.AuxInt = 0 953 return true 954 } 955 } 956 func rewriteValueMIPS64_OpConvert_0(v *Value) bool { 957 // match: (Convert x mem) 958 // cond: 959 // result: (MOVVconvert x mem) 960 for { 961 _ = v.Args[1] 962 x := v.Args[0] 963 mem := v.Args[1] 964 v.reset(OpMIPS64MOVVconvert) 965 v.AddArg(x) 966 v.AddArg(mem) 967 return true 968 } 969 } 970 func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool { 971 // match: (Cvt32Fto32 x) 972 // cond: 973 // result: (TRUNCFW x) 974 for { 975 x := v.Args[0] 976 
v.reset(OpMIPS64TRUNCFW) 977 v.AddArg(x) 978 return true 979 } 980 } 981 func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool { 982 // match: (Cvt32Fto64 x) 983 // cond: 984 // result: (TRUNCFV x) 985 for { 986 x := v.Args[0] 987 v.reset(OpMIPS64TRUNCFV) 988 v.AddArg(x) 989 return true 990 } 991 } 992 func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool { 993 // match: (Cvt32Fto64F x) 994 // cond: 995 // result: (MOVFD x) 996 for { 997 x := v.Args[0] 998 v.reset(OpMIPS64MOVFD) 999 v.AddArg(x) 1000 return true 1001 } 1002 } 1003 func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool { 1004 // match: (Cvt32to32F x) 1005 // cond: 1006 // result: (MOVWF x) 1007 for { 1008 x := v.Args[0] 1009 v.reset(OpMIPS64MOVWF) 1010 v.AddArg(x) 1011 return true 1012 } 1013 } 1014 func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool { 1015 // match: (Cvt32to64F x) 1016 // cond: 1017 // result: (MOVWD x) 1018 for { 1019 x := v.Args[0] 1020 v.reset(OpMIPS64MOVWD) 1021 v.AddArg(x) 1022 return true 1023 } 1024 } 1025 func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool { 1026 // match: (Cvt64Fto32 x) 1027 // cond: 1028 // result: (TRUNCDW x) 1029 for { 1030 x := v.Args[0] 1031 v.reset(OpMIPS64TRUNCDW) 1032 v.AddArg(x) 1033 return true 1034 } 1035 } 1036 func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool { 1037 // match: (Cvt64Fto32F x) 1038 // cond: 1039 // result: (MOVDF x) 1040 for { 1041 x := v.Args[0] 1042 v.reset(OpMIPS64MOVDF) 1043 v.AddArg(x) 1044 return true 1045 } 1046 } 1047 func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool { 1048 // match: (Cvt64Fto64 x) 1049 // cond: 1050 // result: (TRUNCDV x) 1051 for { 1052 x := v.Args[0] 1053 v.reset(OpMIPS64TRUNCDV) 1054 v.AddArg(x) 1055 return true 1056 } 1057 } 1058 func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool { 1059 // match: (Cvt64to32F x) 1060 // cond: 1061 // result: (MOVVF x) 1062 for { 1063 x := v.Args[0] 1064 v.reset(OpMIPS64MOVVF) 1065 v.AddArg(x) 1066 return true 1067 } 1068 } 1069 func 
rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool { 1070 // match: (Cvt64to64F x) 1071 // cond: 1072 // result: (MOVVD x) 1073 for { 1074 x := v.Args[0] 1075 v.reset(OpMIPS64MOVVD) 1076 v.AddArg(x) 1077 return true 1078 } 1079 } 1080 func rewriteValueMIPS64_OpDiv16_0(v *Value) bool { 1081 b := v.Block 1082 _ = b 1083 typ := &b.Func.Config.Types 1084 _ = typ 1085 // match: (Div16 x y) 1086 // cond: 1087 // result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 1088 for { 1089 _ = v.Args[1] 1090 x := v.Args[0] 1091 y := v.Args[1] 1092 v.reset(OpSelect1) 1093 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1094 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1095 v1.AddArg(x) 1096 v0.AddArg(v1) 1097 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1098 v2.AddArg(y) 1099 v0.AddArg(v2) 1100 v.AddArg(v0) 1101 return true 1102 } 1103 } 1104 func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool { 1105 b := v.Block 1106 _ = b 1107 typ := &b.Func.Config.Types 1108 _ = typ 1109 // match: (Div16u x y) 1110 // cond: 1111 // result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 1112 for { 1113 _ = v.Args[1] 1114 x := v.Args[0] 1115 y := v.Args[1] 1116 v.reset(OpSelect1) 1117 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1118 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1119 v1.AddArg(x) 1120 v0.AddArg(v1) 1121 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1122 v2.AddArg(y) 1123 v0.AddArg(v2) 1124 v.AddArg(v0) 1125 return true 1126 } 1127 } 1128 func rewriteValueMIPS64_OpDiv32_0(v *Value) bool { 1129 b := v.Block 1130 _ = b 1131 typ := &b.Func.Config.Types 1132 _ = typ 1133 // match: (Div32 x y) 1134 // cond: 1135 // result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 1136 for { 1137 _ = v.Args[1] 1138 x := v.Args[0] 1139 y := v.Args[1] 1140 v.reset(OpSelect1) 1141 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1142 v1 := b.NewValue0(v.Pos, 
OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool {
	// match: (Div32F x y)
	// cond:
	// result: (DIVF x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVF)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div32u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpDiv64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64 x y)
	// cond:
	// result: (Select1 (DIVV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool {
	// match: (Div64F x y)
	// cond:
	// result: (DIVD x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64DIVD)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div64u x y)
	// cond:
	// result: (Select1 (DIVVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpDiv8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8 x y)
	// cond:
	// result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Div8u x y)
	// cond:
	// result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect1)
		v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg(v2)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpEq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq16 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpEq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq32 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpEq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpEq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq64 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpEq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Eq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPEQD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpEq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Eq8 x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpEqB_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqB x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (EqPtr x y)
	// cond:
	// result: (SGTU (MOVVconst [1]) (XOR x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Geq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(y)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Geq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool {
	// match: (GetClosurePtr)
	// cond:
	// result: (LoweredGetClosurePtr)
	for {
		v.reset(OpMIPS64LoweredGetClosurePtr)
		return true
	}
}
func rewriteValueMIPS64_OpGreater16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16 x y)
	// cond:
	// result: (SGT (SignExt16to64 x) (SignExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGreater32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater32 x y)
	// cond:
	// result: (SGT (SignExt32to64 x) (SignExt32to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGreater64_0(v *Value) bool {
	// match: (Greater64 x y)
	// cond:
	// result: (SGT x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Greater64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool {
	// match: (Greater64U x y)
	// cond:
	// result: (SGTU x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
}
func rewriteValueMIPS64_OpGreater8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8 x y)
	// cond:
	// result: (SGT (SignExt8to64 x) (SignExt8to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Greater8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpHmul32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32 x y)
	// cond:
	// result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul32u x y)
	// cond:
	// result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRLVconst)
		v.AuxInt = 32
		v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpHmul64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64 x y)
	// cond:
	// result: (Select0 (MULV x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Hmul64u x y)
	// cond:
	// result: (Select0 (MULVU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpSelect0)
		v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpInterCall_0(v *Value) bool {
	// match: (InterCall [argwid] entry mem)
	// cond:
	// result: (CALLinter [argwid] entry mem)
	for {
		argwid := v.AuxInt
		_ = v.Args[1]
		entry := v.Args[0]
		mem := v.Args[1]
		v.reset(OpMIPS64CALLinter)
		v.AuxInt = argwid
		v.AddArg(entry)
		v.AddArg(mem)
		return true
	}
}
func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool {
	// match: (IsInBounds idx len)
	// cond:
	// result: (SGTU len idx)
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(len)
		v.AddArg(idx)
		return true
	}
}
func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsNonNil ptr)
	// cond:
	// result: (SGTU ptr (MOVVconst [0]))
	for {
		ptr := v.Args[0]
		v.reset(OpMIPS64SGTU)
		v.AddArg(ptr)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 0
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (IsSliceInBounds idx len)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU idx len))
	for {
		_ = v.Args[1]
		idx := v.Args[0]
		len := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(idx)
		v1.AddArg(len)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq16U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGEF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq32U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Leq64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGED y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq64U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU x y))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v1.AddArg(x)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8 x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Leq8U x y)
	// cond:
	// result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64XOR)
		v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
		v0.AuxInt = 1
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(x)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16 x y)
	// cond:
	// result: (SGT (SignExt16to64 y) (SignExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess16U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less16U x y)
	// cond:
	// result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32 x y)
	// cond:
	// result: (SGT (SignExt32to64 y) (SignExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess32F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less32F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTF y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpLess32U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less32U x y)
	// cond:
	// result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess64_0(v *Value) bool {
	// match: (Less64 x y)
	// cond:
	// result: (SGT y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS64_OpLess64F_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (Less64F x y)
	// cond:
	// result: (FPFlagTrue (CMPGTD y x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64FPFlagTrue)
		v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
		v0.AddArg(y)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
func rewriteValueMIPS64_OpLess64U_0(v *Value) bool {
	// match: (Less64U x y)
	// cond:
	// result: (SGTU y x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v.AddArg(y)
		v.AddArg(x)
		return true
	}
}
func rewriteValueMIPS64_OpLess8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8 x y)
	// cond:
	// result: (SGT (SignExt8to64 y) (SignExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLess8U_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Less8U x y)
	// cond:
	// result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SGTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(y)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v.AddArg(v1)
		return true
	}
}
func rewriteValueMIPS64_OpLoad_0(v *Value) bool {
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && isSigned(t))
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is8BitInt(t) && !isSigned(t))
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is8BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVBUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && isSigned(t))
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !isSigned(t))
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is16BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVHUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && isSigned(t))
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !isSigned(t))
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(is32BitInt(t) && !isSigned(t)) {
			break
		}
		v.reset(OpMIPS64MOVWUload)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVVload ptr mem)
	for {
		t := v.Type
		_ = v.Args[1]
		ptr := v.Args[0]
		mem := v.Args[1]
if !(is64BitInt(t) || isPtr(t)) { 2654 break 2655 } 2656 v.reset(OpMIPS64MOVVload) 2657 v.AddArg(ptr) 2658 v.AddArg(mem) 2659 return true 2660 } 2661 // match: (Load <t> ptr mem) 2662 // cond: is32BitFloat(t) 2663 // result: (MOVFload ptr mem) 2664 for { 2665 t := v.Type 2666 _ = v.Args[1] 2667 ptr := v.Args[0] 2668 mem := v.Args[1] 2669 if !(is32BitFloat(t)) { 2670 break 2671 } 2672 v.reset(OpMIPS64MOVFload) 2673 v.AddArg(ptr) 2674 v.AddArg(mem) 2675 return true 2676 } 2677 // match: (Load <t> ptr mem) 2678 // cond: is64BitFloat(t) 2679 // result: (MOVDload ptr mem) 2680 for { 2681 t := v.Type 2682 _ = v.Args[1] 2683 ptr := v.Args[0] 2684 mem := v.Args[1] 2685 if !(is64BitFloat(t)) { 2686 break 2687 } 2688 v.reset(OpMIPS64MOVDload) 2689 v.AddArg(ptr) 2690 v.AddArg(mem) 2691 return true 2692 } 2693 return false 2694 } 2695 func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool { 2696 b := v.Block 2697 _ = b 2698 typ := &b.Func.Config.Types 2699 _ = typ 2700 // match: (Lsh16x16 <t> x y) 2701 // cond: 2702 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 2703 for { 2704 t := v.Type 2705 _ = v.Args[1] 2706 x := v.Args[0] 2707 y := v.Args[1] 2708 v.reset(OpMIPS64AND) 2709 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2710 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2711 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2712 v2.AuxInt = 64 2713 v1.AddArg(v2) 2714 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2715 v3.AddArg(y) 2716 v1.AddArg(v3) 2717 v0.AddArg(v1) 2718 v.AddArg(v0) 2719 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2720 v4.AddArg(x) 2721 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2722 v5.AddArg(y) 2723 v4.AddArg(v5) 2724 v.AddArg(v4) 2725 return true 2726 } 2727 } 2728 func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool { 2729 b := v.Block 2730 _ = b 2731 typ := &b.Func.Config.Types 2732 _ = typ 2733 // match: (Lsh16x32 <t> x y) 2734 // cond: 2735 // result: (AND (NEGV <t> (SGTU 
(Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 2736 for { 2737 t := v.Type 2738 _ = v.Args[1] 2739 x := v.Args[0] 2740 y := v.Args[1] 2741 v.reset(OpMIPS64AND) 2742 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2743 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2744 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2745 v2.AuxInt = 64 2746 v1.AddArg(v2) 2747 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2748 v3.AddArg(y) 2749 v1.AddArg(v3) 2750 v0.AddArg(v1) 2751 v.AddArg(v0) 2752 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2753 v4.AddArg(x) 2754 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2755 v5.AddArg(y) 2756 v4.AddArg(v5) 2757 v.AddArg(v4) 2758 return true 2759 } 2760 } 2761 func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool { 2762 b := v.Block 2763 _ = b 2764 typ := &b.Func.Config.Types 2765 _ = typ 2766 // match: (Lsh16x64 <t> x y) 2767 // cond: 2768 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y)) 2769 for { 2770 t := v.Type 2771 _ = v.Args[1] 2772 x := v.Args[0] 2773 y := v.Args[1] 2774 v.reset(OpMIPS64AND) 2775 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2776 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2777 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2778 v2.AuxInt = 64 2779 v1.AddArg(v2) 2780 v1.AddArg(y) 2781 v0.AddArg(v1) 2782 v.AddArg(v0) 2783 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2784 v3.AddArg(x) 2785 v3.AddArg(y) 2786 v.AddArg(v3) 2787 return true 2788 } 2789 } 2790 func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool { 2791 b := v.Block 2792 _ = b 2793 typ := &b.Func.Config.Types 2794 _ = typ 2795 // match: (Lsh16x8 <t> x y) 2796 // cond: 2797 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 2798 for { 2799 t := v.Type 2800 _ = v.Args[1] 2801 x := v.Args[0] 2802 y := v.Args[1] 2803 v.reset(OpMIPS64AND) 2804 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2805 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 
2806 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2807 v2.AuxInt = 64 2808 v1.AddArg(v2) 2809 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2810 v3.AddArg(y) 2811 v1.AddArg(v3) 2812 v0.AddArg(v1) 2813 v.AddArg(v0) 2814 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2815 v4.AddArg(x) 2816 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2817 v5.AddArg(y) 2818 v4.AddArg(v5) 2819 v.AddArg(v4) 2820 return true 2821 } 2822 } 2823 func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool { 2824 b := v.Block 2825 _ = b 2826 typ := &b.Func.Config.Types 2827 _ = typ 2828 // match: (Lsh32x16 <t> x y) 2829 // cond: 2830 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 2831 for { 2832 t := v.Type 2833 _ = v.Args[1] 2834 x := v.Args[0] 2835 y := v.Args[1] 2836 v.reset(OpMIPS64AND) 2837 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2838 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2839 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2840 v2.AuxInt = 64 2841 v1.AddArg(v2) 2842 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2843 v3.AddArg(y) 2844 v1.AddArg(v3) 2845 v0.AddArg(v1) 2846 v.AddArg(v0) 2847 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2848 v4.AddArg(x) 2849 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2850 v5.AddArg(y) 2851 v4.AddArg(v5) 2852 v.AddArg(v4) 2853 return true 2854 } 2855 } 2856 func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool { 2857 b := v.Block 2858 _ = b 2859 typ := &b.Func.Config.Types 2860 _ = typ 2861 // match: (Lsh32x32 <t> x y) 2862 // cond: 2863 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 2864 for { 2865 t := v.Type 2866 _ = v.Args[1] 2867 x := v.Args[0] 2868 y := v.Args[1] 2869 v.reset(OpMIPS64AND) 2870 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2871 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2872 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2873 v2.AuxInt = 64 2874 v1.AddArg(v2) 2875 v3 := 
b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2876 v3.AddArg(y) 2877 v1.AddArg(v3) 2878 v0.AddArg(v1) 2879 v.AddArg(v0) 2880 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2881 v4.AddArg(x) 2882 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2883 v5.AddArg(y) 2884 v4.AddArg(v5) 2885 v.AddArg(v4) 2886 return true 2887 } 2888 } 2889 func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool { 2890 b := v.Block 2891 _ = b 2892 typ := &b.Func.Config.Types 2893 _ = typ 2894 // match: (Lsh32x64 <t> x y) 2895 // cond: 2896 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y)) 2897 for { 2898 t := v.Type 2899 _ = v.Args[1] 2900 x := v.Args[0] 2901 y := v.Args[1] 2902 v.reset(OpMIPS64AND) 2903 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2904 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2905 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2906 v2.AuxInt = 64 2907 v1.AddArg(v2) 2908 v1.AddArg(y) 2909 v0.AddArg(v1) 2910 v.AddArg(v0) 2911 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2912 v3.AddArg(x) 2913 v3.AddArg(y) 2914 v.AddArg(v3) 2915 return true 2916 } 2917 } 2918 func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool { 2919 b := v.Block 2920 _ = b 2921 typ := &b.Func.Config.Types 2922 _ = typ 2923 // match: (Lsh32x8 <t> x y) 2924 // cond: 2925 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 2926 for { 2927 t := v.Type 2928 _ = v.Args[1] 2929 x := v.Args[0] 2930 y := v.Args[1] 2931 v.reset(OpMIPS64AND) 2932 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2933 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2934 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2935 v2.AuxInt = 64 2936 v1.AddArg(v2) 2937 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2938 v3.AddArg(y) 2939 v1.AddArg(v3) 2940 v0.AddArg(v1) 2941 v.AddArg(v0) 2942 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2943 v4.AddArg(x) 2944 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2945 v5.AddArg(y) 2946 v4.AddArg(v5) 2947 
v.AddArg(v4) 2948 return true 2949 } 2950 } 2951 func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool { 2952 b := v.Block 2953 _ = b 2954 typ := &b.Func.Config.Types 2955 _ = typ 2956 // match: (Lsh64x16 <t> x y) 2957 // cond: 2958 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 2959 for { 2960 t := v.Type 2961 _ = v.Args[1] 2962 x := v.Args[0] 2963 y := v.Args[1] 2964 v.reset(OpMIPS64AND) 2965 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2966 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2967 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 2968 v2.AuxInt = 64 2969 v1.AddArg(v2) 2970 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2971 v3.AddArg(y) 2972 v1.AddArg(v3) 2973 v0.AddArg(v1) 2974 v.AddArg(v0) 2975 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2976 v4.AddArg(x) 2977 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2978 v5.AddArg(y) 2979 v4.AddArg(v5) 2980 v.AddArg(v4) 2981 return true 2982 } 2983 } 2984 func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool { 2985 b := v.Block 2986 _ = b 2987 typ := &b.Func.Config.Types 2988 _ = typ 2989 // match: (Lsh64x32 <t> x y) 2990 // cond: 2991 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 2992 for { 2993 t := v.Type 2994 _ = v.Args[1] 2995 x := v.Args[0] 2996 y := v.Args[1] 2997 v.reset(OpMIPS64AND) 2998 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2999 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3000 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3001 v2.AuxInt = 64 3002 v1.AddArg(v2) 3003 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3004 v3.AddArg(y) 3005 v1.AddArg(v3) 3006 v0.AddArg(v1) 3007 v.AddArg(v0) 3008 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3009 v4.AddArg(x) 3010 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3011 v5.AddArg(y) 3012 v4.AddArg(v5) 3013 v.AddArg(v4) 3014 return true 3015 } 3016 } 3017 func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool { 
3018 b := v.Block 3019 _ = b 3020 typ := &b.Func.Config.Types 3021 _ = typ 3022 // match: (Lsh64x64 <t> x y) 3023 // cond: 3024 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3025 for { 3026 t := v.Type 3027 _ = v.Args[1] 3028 x := v.Args[0] 3029 y := v.Args[1] 3030 v.reset(OpMIPS64AND) 3031 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3032 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3033 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3034 v2.AuxInt = 64 3035 v1.AddArg(v2) 3036 v1.AddArg(y) 3037 v0.AddArg(v1) 3038 v.AddArg(v0) 3039 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3040 v3.AddArg(x) 3041 v3.AddArg(y) 3042 v.AddArg(v3) 3043 return true 3044 } 3045 } 3046 func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool { 3047 b := v.Block 3048 _ = b 3049 typ := &b.Func.Config.Types 3050 _ = typ 3051 // match: (Lsh64x8 <t> x y) 3052 // cond: 3053 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3054 for { 3055 t := v.Type 3056 _ = v.Args[1] 3057 x := v.Args[0] 3058 y := v.Args[1] 3059 v.reset(OpMIPS64AND) 3060 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3061 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3062 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3063 v2.AuxInt = 64 3064 v1.AddArg(v2) 3065 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3066 v3.AddArg(y) 3067 v1.AddArg(v3) 3068 v0.AddArg(v1) 3069 v.AddArg(v0) 3070 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3071 v4.AddArg(x) 3072 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3073 v5.AddArg(y) 3074 v4.AddArg(v5) 3075 v.AddArg(v4) 3076 return true 3077 } 3078 } 3079 func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool { 3080 b := v.Block 3081 _ = b 3082 typ := &b.Func.Config.Types 3083 _ = typ 3084 // match: (Lsh8x16 <t> x y) 3085 // cond: 3086 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3087 for { 3088 t := v.Type 3089 _ = v.Args[1] 3090 x := 
v.Args[0] 3091 y := v.Args[1] 3092 v.reset(OpMIPS64AND) 3093 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3094 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3095 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3096 v2.AuxInt = 64 3097 v1.AddArg(v2) 3098 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3099 v3.AddArg(y) 3100 v1.AddArg(v3) 3101 v0.AddArg(v1) 3102 v.AddArg(v0) 3103 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3104 v4.AddArg(x) 3105 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3106 v5.AddArg(y) 3107 v4.AddArg(v5) 3108 v.AddArg(v4) 3109 return true 3110 } 3111 } 3112 func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool { 3113 b := v.Block 3114 _ = b 3115 typ := &b.Func.Config.Types 3116 _ = typ 3117 // match: (Lsh8x32 <t> x y) 3118 // cond: 3119 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3120 for { 3121 t := v.Type 3122 _ = v.Args[1] 3123 x := v.Args[0] 3124 y := v.Args[1] 3125 v.reset(OpMIPS64AND) 3126 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3127 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3128 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3129 v2.AuxInt = 64 3130 v1.AddArg(v2) 3131 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3132 v3.AddArg(y) 3133 v1.AddArg(v3) 3134 v0.AddArg(v1) 3135 v.AddArg(v0) 3136 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3137 v4.AddArg(x) 3138 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3139 v5.AddArg(y) 3140 v4.AddArg(v5) 3141 v.AddArg(v4) 3142 return true 3143 } 3144 } 3145 func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool { 3146 b := v.Block 3147 _ = b 3148 typ := &b.Func.Config.Types 3149 _ = typ 3150 // match: (Lsh8x64 <t> x y) 3151 // cond: 3152 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3153 for { 3154 t := v.Type 3155 _ = v.Args[1] 3156 x := v.Args[0] 3157 y := v.Args[1] 3158 v.reset(OpMIPS64AND) 3159 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3160 v1 := b.NewValue0(v.Pos, 
OpMIPS64SGTU, typ.Bool) 3161 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3162 v2.AuxInt = 64 3163 v1.AddArg(v2) 3164 v1.AddArg(y) 3165 v0.AddArg(v1) 3166 v.AddArg(v0) 3167 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3168 v3.AddArg(x) 3169 v3.AddArg(y) 3170 v.AddArg(v3) 3171 return true 3172 } 3173 } 3174 func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool { 3175 b := v.Block 3176 _ = b 3177 typ := &b.Func.Config.Types 3178 _ = typ 3179 // match: (Lsh8x8 <t> x y) 3180 // cond: 3181 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3182 for { 3183 t := v.Type 3184 _ = v.Args[1] 3185 x := v.Args[0] 3186 y := v.Args[1] 3187 v.reset(OpMIPS64AND) 3188 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3189 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3190 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 3191 v2.AuxInt = 64 3192 v1.AddArg(v2) 3193 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3194 v3.AddArg(y) 3195 v1.AddArg(v3) 3196 v0.AddArg(v1) 3197 v.AddArg(v0) 3198 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3199 v4.AddArg(x) 3200 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3201 v5.AddArg(y) 3202 v4.AddArg(v5) 3203 v.AddArg(v4) 3204 return true 3205 } 3206 } 3207 func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool { 3208 // match: (ADDV x (MOVVconst [c])) 3209 // cond: is32Bit(c) 3210 // result: (ADDVconst [c] x) 3211 for { 3212 _ = v.Args[1] 3213 x := v.Args[0] 3214 v_1 := v.Args[1] 3215 if v_1.Op != OpMIPS64MOVVconst { 3216 break 3217 } 3218 c := v_1.AuxInt 3219 if !(is32Bit(c)) { 3220 break 3221 } 3222 v.reset(OpMIPS64ADDVconst) 3223 v.AuxInt = c 3224 v.AddArg(x) 3225 return true 3226 } 3227 // match: (ADDV (MOVVconst [c]) x) 3228 // cond: is32Bit(c) 3229 // result: (ADDVconst [c] x) 3230 for { 3231 _ = v.Args[1] 3232 v_0 := v.Args[0] 3233 if v_0.Op != OpMIPS64MOVVconst { 3234 break 3235 } 3236 c := v_0.AuxInt 3237 x := v.Args[1] 3238 if !(is32Bit(c)) { 3239 break 3240 } 3241 
v.reset(OpMIPS64ADDVconst) 3242 v.AuxInt = c 3243 v.AddArg(x) 3244 return true 3245 } 3246 // match: (ADDV x (NEGV y)) 3247 // cond: 3248 // result: (SUBV x y) 3249 for { 3250 _ = v.Args[1] 3251 x := v.Args[0] 3252 v_1 := v.Args[1] 3253 if v_1.Op != OpMIPS64NEGV { 3254 break 3255 } 3256 y := v_1.Args[0] 3257 v.reset(OpMIPS64SUBV) 3258 v.AddArg(x) 3259 v.AddArg(y) 3260 return true 3261 } 3262 // match: (ADDV (NEGV y) x) 3263 // cond: 3264 // result: (SUBV x y) 3265 for { 3266 _ = v.Args[1] 3267 v_0 := v.Args[0] 3268 if v_0.Op != OpMIPS64NEGV { 3269 break 3270 } 3271 y := v_0.Args[0] 3272 x := v.Args[1] 3273 v.reset(OpMIPS64SUBV) 3274 v.AddArg(x) 3275 v.AddArg(y) 3276 return true 3277 } 3278 return false 3279 } 3280 func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool { 3281 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr)) 3282 // cond: 3283 // result: (MOVVaddr [off1+off2] {sym} ptr) 3284 for { 3285 off1 := v.AuxInt 3286 v_0 := v.Args[0] 3287 if v_0.Op != OpMIPS64MOVVaddr { 3288 break 3289 } 3290 off2 := v_0.AuxInt 3291 sym := v_0.Aux 3292 ptr := v_0.Args[0] 3293 v.reset(OpMIPS64MOVVaddr) 3294 v.AuxInt = off1 + off2 3295 v.Aux = sym 3296 v.AddArg(ptr) 3297 return true 3298 } 3299 // match: (ADDVconst [0] x) 3300 // cond: 3301 // result: x 3302 for { 3303 if v.AuxInt != 0 { 3304 break 3305 } 3306 x := v.Args[0] 3307 v.reset(OpCopy) 3308 v.Type = x.Type 3309 v.AddArg(x) 3310 return true 3311 } 3312 // match: (ADDVconst [c] (MOVVconst [d])) 3313 // cond: 3314 // result: (MOVVconst [c+d]) 3315 for { 3316 c := v.AuxInt 3317 v_0 := v.Args[0] 3318 if v_0.Op != OpMIPS64MOVVconst { 3319 break 3320 } 3321 d := v_0.AuxInt 3322 v.reset(OpMIPS64MOVVconst) 3323 v.AuxInt = c + d 3324 return true 3325 } 3326 // match: (ADDVconst [c] (ADDVconst [d] x)) 3327 // cond: is32Bit(c+d) 3328 // result: (ADDVconst [c+d] x) 3329 for { 3330 c := v.AuxInt 3331 v_0 := v.Args[0] 3332 if v_0.Op != OpMIPS64ADDVconst { 3333 break 3334 } 3335 d := v_0.AuxInt 3336 x := v_0.Args[0] 3337 
if !(is32Bit(c + d)) { 3338 break 3339 } 3340 v.reset(OpMIPS64ADDVconst) 3341 v.AuxInt = c + d 3342 v.AddArg(x) 3343 return true 3344 } 3345 // match: (ADDVconst [c] (SUBVconst [d] x)) 3346 // cond: is32Bit(c-d) 3347 // result: (ADDVconst [c-d] x) 3348 for { 3349 c := v.AuxInt 3350 v_0 := v.Args[0] 3351 if v_0.Op != OpMIPS64SUBVconst { 3352 break 3353 } 3354 d := v_0.AuxInt 3355 x := v_0.Args[0] 3356 if !(is32Bit(c - d)) { 3357 break 3358 } 3359 v.reset(OpMIPS64ADDVconst) 3360 v.AuxInt = c - d 3361 v.AddArg(x) 3362 return true 3363 } 3364 return false 3365 } 3366 func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool { 3367 // match: (AND x (MOVVconst [c])) 3368 // cond: is32Bit(c) 3369 // result: (ANDconst [c] x) 3370 for { 3371 _ = v.Args[1] 3372 x := v.Args[0] 3373 v_1 := v.Args[1] 3374 if v_1.Op != OpMIPS64MOVVconst { 3375 break 3376 } 3377 c := v_1.AuxInt 3378 if !(is32Bit(c)) { 3379 break 3380 } 3381 v.reset(OpMIPS64ANDconst) 3382 v.AuxInt = c 3383 v.AddArg(x) 3384 return true 3385 } 3386 // match: (AND (MOVVconst [c]) x) 3387 // cond: is32Bit(c) 3388 // result: (ANDconst [c] x) 3389 for { 3390 _ = v.Args[1] 3391 v_0 := v.Args[0] 3392 if v_0.Op != OpMIPS64MOVVconst { 3393 break 3394 } 3395 c := v_0.AuxInt 3396 x := v.Args[1] 3397 if !(is32Bit(c)) { 3398 break 3399 } 3400 v.reset(OpMIPS64ANDconst) 3401 v.AuxInt = c 3402 v.AddArg(x) 3403 return true 3404 } 3405 // match: (AND x x) 3406 // cond: 3407 // result: x 3408 for { 3409 _ = v.Args[1] 3410 x := v.Args[0] 3411 if x != v.Args[1] { 3412 break 3413 } 3414 v.reset(OpCopy) 3415 v.Type = x.Type 3416 v.AddArg(x) 3417 return true 3418 } 3419 return false 3420 } 3421 func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool { 3422 // match: (ANDconst [0] _) 3423 // cond: 3424 // result: (MOVVconst [0]) 3425 for { 3426 if v.AuxInt != 0 { 3427 break 3428 } 3429 v.reset(OpMIPS64MOVVconst) 3430 v.AuxInt = 0 3431 return true 3432 } 3433 // match: (ANDconst [-1] x) 3434 // cond: 3435 // result: x 3436 for { 3437 if 
v.AuxInt != -1 { 3438 break 3439 } 3440 x := v.Args[0] 3441 v.reset(OpCopy) 3442 v.Type = x.Type 3443 v.AddArg(x) 3444 return true 3445 } 3446 // match: (ANDconst [c] (MOVVconst [d])) 3447 // cond: 3448 // result: (MOVVconst [c&d]) 3449 for { 3450 c := v.AuxInt 3451 v_0 := v.Args[0] 3452 if v_0.Op != OpMIPS64MOVVconst { 3453 break 3454 } 3455 d := v_0.AuxInt 3456 v.reset(OpMIPS64MOVVconst) 3457 v.AuxInt = c & d 3458 return true 3459 } 3460 // match: (ANDconst [c] (ANDconst [d] x)) 3461 // cond: 3462 // result: (ANDconst [c&d] x) 3463 for { 3464 c := v.AuxInt 3465 v_0 := v.Args[0] 3466 if v_0.Op != OpMIPS64ANDconst { 3467 break 3468 } 3469 d := v_0.AuxInt 3470 x := v_0.Args[0] 3471 v.reset(OpMIPS64ANDconst) 3472 v.AuxInt = c & d 3473 v.AddArg(x) 3474 return true 3475 } 3476 return false 3477 } 3478 func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool { 3479 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem) 3480 // cond: is32Bit(off1+off2) 3481 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3482 for { 3483 off1 := v.AuxInt 3484 sym := v.Aux 3485 _ = v.Args[1] 3486 v_0 := v.Args[0] 3487 if v_0.Op != OpMIPS64ADDVconst { 3488 break 3489 } 3490 off2 := v_0.AuxInt 3491 ptr := v_0.Args[0] 3492 mem := v.Args[1] 3493 if !(is32Bit(off1 + off2)) { 3494 break 3495 } 3496 v.reset(OpMIPS64MOVBUload) 3497 v.AuxInt = off1 + off2 3498 v.Aux = sym 3499 v.AddArg(ptr) 3500 v.AddArg(mem) 3501 return true 3502 } 3503 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3504 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3505 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3506 for { 3507 off1 := v.AuxInt 3508 sym1 := v.Aux 3509 _ = v.Args[1] 3510 v_0 := v.Args[0] 3511 if v_0.Op != OpMIPS64MOVVaddr { 3512 break 3513 } 3514 off2 := v_0.AuxInt 3515 sym2 := v_0.Aux 3516 ptr := v_0.Args[0] 3517 mem := v.Args[1] 3518 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3519 break 3520 } 3521 v.reset(OpMIPS64MOVBUload) 3522 v.AuxInt = 
off1 + off2 3523 v.Aux = mergeSym(sym1, sym2) 3524 v.AddArg(ptr) 3525 v.AddArg(mem) 3526 return true 3527 } 3528 return false 3529 } 3530 func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool { 3531 // match: (MOVBUreg x:(MOVBUload _ _)) 3532 // cond: 3533 // result: (MOVVreg x) 3534 for { 3535 x := v.Args[0] 3536 if x.Op != OpMIPS64MOVBUload { 3537 break 3538 } 3539 _ = x.Args[1] 3540 v.reset(OpMIPS64MOVVreg) 3541 v.AddArg(x) 3542 return true 3543 } 3544 // match: (MOVBUreg x:(MOVBUreg _)) 3545 // cond: 3546 // result: (MOVVreg x) 3547 for { 3548 x := v.Args[0] 3549 if x.Op != OpMIPS64MOVBUreg { 3550 break 3551 } 3552 v.reset(OpMIPS64MOVVreg) 3553 v.AddArg(x) 3554 return true 3555 } 3556 // match: (MOVBUreg (MOVVconst [c])) 3557 // cond: 3558 // result: (MOVVconst [int64(uint8(c))]) 3559 for { 3560 v_0 := v.Args[0] 3561 if v_0.Op != OpMIPS64MOVVconst { 3562 break 3563 } 3564 c := v_0.AuxInt 3565 v.reset(OpMIPS64MOVVconst) 3566 v.AuxInt = int64(uint8(c)) 3567 return true 3568 } 3569 return false 3570 } 3571 func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool { 3572 // match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem) 3573 // cond: is32Bit(off1+off2) 3574 // result: (MOVBload [off1+off2] {sym} ptr mem) 3575 for { 3576 off1 := v.AuxInt 3577 sym := v.Aux 3578 _ = v.Args[1] 3579 v_0 := v.Args[0] 3580 if v_0.Op != OpMIPS64ADDVconst { 3581 break 3582 } 3583 off2 := v_0.AuxInt 3584 ptr := v_0.Args[0] 3585 mem := v.Args[1] 3586 if !(is32Bit(off1 + off2)) { 3587 break 3588 } 3589 v.reset(OpMIPS64MOVBload) 3590 v.AuxInt = off1 + off2 3591 v.Aux = sym 3592 v.AddArg(ptr) 3593 v.AddArg(mem) 3594 return true 3595 } 3596 // match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3597 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3598 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3599 for { 3600 off1 := v.AuxInt 3601 sym1 := v.Aux 3602 _ = v.Args[1] 3603 v_0 := v.Args[0] 3604 if v_0.Op != OpMIPS64MOVVaddr { 3605 break 3606 } 
3607 off2 := v_0.AuxInt 3608 sym2 := v_0.Aux 3609 ptr := v_0.Args[0] 3610 mem := v.Args[1] 3611 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3612 break 3613 } 3614 v.reset(OpMIPS64MOVBload) 3615 v.AuxInt = off1 + off2 3616 v.Aux = mergeSym(sym1, sym2) 3617 v.AddArg(ptr) 3618 v.AddArg(mem) 3619 return true 3620 } 3621 return false 3622 } 3623 func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool { 3624 // match: (MOVBreg x:(MOVBload _ _)) 3625 // cond: 3626 // result: (MOVVreg x) 3627 for { 3628 x := v.Args[0] 3629 if x.Op != OpMIPS64MOVBload { 3630 break 3631 } 3632 _ = x.Args[1] 3633 v.reset(OpMIPS64MOVVreg) 3634 v.AddArg(x) 3635 return true 3636 } 3637 // match: (MOVBreg x:(MOVBreg _)) 3638 // cond: 3639 // result: (MOVVreg x) 3640 for { 3641 x := v.Args[0] 3642 if x.Op != OpMIPS64MOVBreg { 3643 break 3644 } 3645 v.reset(OpMIPS64MOVVreg) 3646 v.AddArg(x) 3647 return true 3648 } 3649 // match: (MOVBreg (MOVVconst [c])) 3650 // cond: 3651 // result: (MOVVconst [int64(int8(c))]) 3652 for { 3653 v_0 := v.Args[0] 3654 if v_0.Op != OpMIPS64MOVVconst { 3655 break 3656 } 3657 c := v_0.AuxInt 3658 v.reset(OpMIPS64MOVVconst) 3659 v.AuxInt = int64(int8(c)) 3660 return true 3661 } 3662 return false 3663 } 3664 func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool { 3665 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 3666 // cond: is32Bit(off1+off2) 3667 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 3668 for { 3669 off1 := v.AuxInt 3670 sym := v.Aux 3671 _ = v.Args[2] 3672 v_0 := v.Args[0] 3673 if v_0.Op != OpMIPS64ADDVconst { 3674 break 3675 } 3676 off2 := v_0.AuxInt 3677 ptr := v_0.Args[0] 3678 val := v.Args[1] 3679 mem := v.Args[2] 3680 if !(is32Bit(off1 + off2)) { 3681 break 3682 } 3683 v.reset(OpMIPS64MOVBstore) 3684 v.AuxInt = off1 + off2 3685 v.Aux = sym 3686 v.AddArg(ptr) 3687 v.AddArg(val) 3688 v.AddArg(mem) 3689 return true 3690 } 3691 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 3692 // cond: 
canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3693 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 3694 for { 3695 off1 := v.AuxInt 3696 sym1 := v.Aux 3697 _ = v.Args[2] 3698 v_0 := v.Args[0] 3699 if v_0.Op != OpMIPS64MOVVaddr { 3700 break 3701 } 3702 off2 := v_0.AuxInt 3703 sym2 := v_0.Aux 3704 ptr := v_0.Args[0] 3705 val := v.Args[1] 3706 mem := v.Args[2] 3707 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3708 break 3709 } 3710 v.reset(OpMIPS64MOVBstore) 3711 v.AuxInt = off1 + off2 3712 v.Aux = mergeSym(sym1, sym2) 3713 v.AddArg(ptr) 3714 v.AddArg(val) 3715 v.AddArg(mem) 3716 return true 3717 } 3718 // match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem) 3719 // cond: 3720 // result: (MOVBstorezero [off] {sym} ptr mem) 3721 for { 3722 off := v.AuxInt 3723 sym := v.Aux 3724 _ = v.Args[2] 3725 ptr := v.Args[0] 3726 v_1 := v.Args[1] 3727 if v_1.Op != OpMIPS64MOVVconst { 3728 break 3729 } 3730 if v_1.AuxInt != 0 { 3731 break 3732 } 3733 mem := v.Args[2] 3734 v.reset(OpMIPS64MOVBstorezero) 3735 v.AuxInt = off 3736 v.Aux = sym 3737 v.AddArg(ptr) 3738 v.AddArg(mem) 3739 return true 3740 } 3741 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 3742 // cond: 3743 // result: (MOVBstore [off] {sym} ptr x mem) 3744 for { 3745 off := v.AuxInt 3746 sym := v.Aux 3747 _ = v.Args[2] 3748 ptr := v.Args[0] 3749 v_1 := v.Args[1] 3750 if v_1.Op != OpMIPS64MOVBreg { 3751 break 3752 } 3753 x := v_1.Args[0] 3754 mem := v.Args[2] 3755 v.reset(OpMIPS64MOVBstore) 3756 v.AuxInt = off 3757 v.Aux = sym 3758 v.AddArg(ptr) 3759 v.AddArg(x) 3760 v.AddArg(mem) 3761 return true 3762 } 3763 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 3764 // cond: 3765 // result: (MOVBstore [off] {sym} ptr x mem) 3766 for { 3767 off := v.AuxInt 3768 sym := v.Aux 3769 _ = v.Args[2] 3770 ptr := v.Args[0] 3771 v_1 := v.Args[1] 3772 if v_1.Op != OpMIPS64MOVBUreg { 3773 break 3774 } 3775 x := v_1.Args[0] 3776 mem := v.Args[2] 3777 v.reset(OpMIPS64MOVBstore) 3778 v.AuxInt = 
off 3779 v.Aux = sym 3780 v.AddArg(ptr) 3781 v.AddArg(x) 3782 v.AddArg(mem) 3783 return true 3784 } 3785 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 3786 // cond: 3787 // result: (MOVBstore [off] {sym} ptr x mem) 3788 for { 3789 off := v.AuxInt 3790 sym := v.Aux 3791 _ = v.Args[2] 3792 ptr := v.Args[0] 3793 v_1 := v.Args[1] 3794 if v_1.Op != OpMIPS64MOVHreg { 3795 break 3796 } 3797 x := v_1.Args[0] 3798 mem := v.Args[2] 3799 v.reset(OpMIPS64MOVBstore) 3800 v.AuxInt = off 3801 v.Aux = sym 3802 v.AddArg(ptr) 3803 v.AddArg(x) 3804 v.AddArg(mem) 3805 return true 3806 } 3807 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 3808 // cond: 3809 // result: (MOVBstore [off] {sym} ptr x mem) 3810 for { 3811 off := v.AuxInt 3812 sym := v.Aux 3813 _ = v.Args[2] 3814 ptr := v.Args[0] 3815 v_1 := v.Args[1] 3816 if v_1.Op != OpMIPS64MOVHUreg { 3817 break 3818 } 3819 x := v_1.Args[0] 3820 mem := v.Args[2] 3821 v.reset(OpMIPS64MOVBstore) 3822 v.AuxInt = off 3823 v.Aux = sym 3824 v.AddArg(ptr) 3825 v.AddArg(x) 3826 v.AddArg(mem) 3827 return true 3828 } 3829 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 3830 // cond: 3831 // result: (MOVBstore [off] {sym} ptr x mem) 3832 for { 3833 off := v.AuxInt 3834 sym := v.Aux 3835 _ = v.Args[2] 3836 ptr := v.Args[0] 3837 v_1 := v.Args[1] 3838 if v_1.Op != OpMIPS64MOVWreg { 3839 break 3840 } 3841 x := v_1.Args[0] 3842 mem := v.Args[2] 3843 v.reset(OpMIPS64MOVBstore) 3844 v.AuxInt = off 3845 v.Aux = sym 3846 v.AddArg(ptr) 3847 v.AddArg(x) 3848 v.AddArg(mem) 3849 return true 3850 } 3851 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 3852 // cond: 3853 // result: (MOVBstore [off] {sym} ptr x mem) 3854 for { 3855 off := v.AuxInt 3856 sym := v.Aux 3857 _ = v.Args[2] 3858 ptr := v.Args[0] 3859 v_1 := v.Args[1] 3860 if v_1.Op != OpMIPS64MOVWUreg { 3861 break 3862 } 3863 x := v_1.Args[0] 3864 mem := v.Args[2] 3865 v.reset(OpMIPS64MOVBstore) 3866 v.AuxInt = off 3867 v.Aux = sym 3868 v.AddArg(ptr) 3869 v.AddArg(x) 3870 
v.AddArg(mem) 3871 return true 3872 } 3873 return false 3874 } 3875 func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool { 3876 // match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 3877 // cond: is32Bit(off1+off2) 3878 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 3879 for { 3880 off1 := v.AuxInt 3881 sym := v.Aux 3882 _ = v.Args[1] 3883 v_0 := v.Args[0] 3884 if v_0.Op != OpMIPS64ADDVconst { 3885 break 3886 } 3887 off2 := v_0.AuxInt 3888 ptr := v_0.Args[0] 3889 mem := v.Args[1] 3890 if !(is32Bit(off1 + off2)) { 3891 break 3892 } 3893 v.reset(OpMIPS64MOVBstorezero) 3894 v.AuxInt = off1 + off2 3895 v.Aux = sym 3896 v.AddArg(ptr) 3897 v.AddArg(mem) 3898 return true 3899 } 3900 // match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3901 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3902 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3903 for { 3904 off1 := v.AuxInt 3905 sym1 := v.Aux 3906 _ = v.Args[1] 3907 v_0 := v.Args[0] 3908 if v_0.Op != OpMIPS64MOVVaddr { 3909 break 3910 } 3911 off2 := v_0.AuxInt 3912 sym2 := v_0.Aux 3913 ptr := v_0.Args[0] 3914 mem := v.Args[1] 3915 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3916 break 3917 } 3918 v.reset(OpMIPS64MOVBstorezero) 3919 v.AuxInt = off1 + off2 3920 v.Aux = mergeSym(sym1, sym2) 3921 v.AddArg(ptr) 3922 v.AddArg(mem) 3923 return true 3924 } 3925 return false 3926 } 3927 func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool { 3928 // match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem) 3929 // cond: is32Bit(off1+off2) 3930 // result: (MOVDload [off1+off2] {sym} ptr mem) 3931 for { 3932 off1 := v.AuxInt 3933 sym := v.Aux 3934 _ = v.Args[1] 3935 v_0 := v.Args[0] 3936 if v_0.Op != OpMIPS64ADDVconst { 3937 break 3938 } 3939 off2 := v_0.AuxInt 3940 ptr := v_0.Args[0] 3941 mem := v.Args[1] 3942 if !(is32Bit(off1 + off2)) { 3943 break 3944 } 3945 v.reset(OpMIPS64MOVDload) 3946 v.AuxInt = off1 + off2 3947 v.Aux = sym 3948 
v.AddArg(ptr) 3949 v.AddArg(mem) 3950 return true 3951 } 3952 // match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3953 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3954 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3955 for { 3956 off1 := v.AuxInt 3957 sym1 := v.Aux 3958 _ = v.Args[1] 3959 v_0 := v.Args[0] 3960 if v_0.Op != OpMIPS64MOVVaddr { 3961 break 3962 } 3963 off2 := v_0.AuxInt 3964 sym2 := v_0.Aux 3965 ptr := v_0.Args[0] 3966 mem := v.Args[1] 3967 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3968 break 3969 } 3970 v.reset(OpMIPS64MOVDload) 3971 v.AuxInt = off1 + off2 3972 v.Aux = mergeSym(sym1, sym2) 3973 v.AddArg(ptr) 3974 v.AddArg(mem) 3975 return true 3976 } 3977 return false 3978 } 3979 func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool { 3980 // match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 3981 // cond: is32Bit(off1+off2) 3982 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 3983 for { 3984 off1 := v.AuxInt 3985 sym := v.Aux 3986 _ = v.Args[2] 3987 v_0 := v.Args[0] 3988 if v_0.Op != OpMIPS64ADDVconst { 3989 break 3990 } 3991 off2 := v_0.AuxInt 3992 ptr := v_0.Args[0] 3993 val := v.Args[1] 3994 mem := v.Args[2] 3995 if !(is32Bit(off1 + off2)) { 3996 break 3997 } 3998 v.reset(OpMIPS64MOVDstore) 3999 v.AuxInt = off1 + off2 4000 v.Aux = sym 4001 v.AddArg(ptr) 4002 v.AddArg(val) 4003 v.AddArg(mem) 4004 return true 4005 } 4006 // match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4007 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4008 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4009 for { 4010 off1 := v.AuxInt 4011 sym1 := v.Aux 4012 _ = v.Args[2] 4013 v_0 := v.Args[0] 4014 if v_0.Op != OpMIPS64MOVVaddr { 4015 break 4016 } 4017 off2 := v_0.AuxInt 4018 sym2 := v_0.Aux 4019 ptr := v_0.Args[0] 4020 val := v.Args[1] 4021 mem := v.Args[2] 4022 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4023 break 4024 } 4025 
v.reset(OpMIPS64MOVDstore) 4026 v.AuxInt = off1 + off2 4027 v.Aux = mergeSym(sym1, sym2) 4028 v.AddArg(ptr) 4029 v.AddArg(val) 4030 v.AddArg(mem) 4031 return true 4032 } 4033 return false 4034 } 4035 func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool { 4036 // match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem) 4037 // cond: is32Bit(off1+off2) 4038 // result: (MOVFload [off1+off2] {sym} ptr mem) 4039 for { 4040 off1 := v.AuxInt 4041 sym := v.Aux 4042 _ = v.Args[1] 4043 v_0 := v.Args[0] 4044 if v_0.Op != OpMIPS64ADDVconst { 4045 break 4046 } 4047 off2 := v_0.AuxInt 4048 ptr := v_0.Args[0] 4049 mem := v.Args[1] 4050 if !(is32Bit(off1 + off2)) { 4051 break 4052 } 4053 v.reset(OpMIPS64MOVFload) 4054 v.AuxInt = off1 + off2 4055 v.Aux = sym 4056 v.AddArg(ptr) 4057 v.AddArg(mem) 4058 return true 4059 } 4060 // match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4061 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4062 // result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4063 for { 4064 off1 := v.AuxInt 4065 sym1 := v.Aux 4066 _ = v.Args[1] 4067 v_0 := v.Args[0] 4068 if v_0.Op != OpMIPS64MOVVaddr { 4069 break 4070 } 4071 off2 := v_0.AuxInt 4072 sym2 := v_0.Aux 4073 ptr := v_0.Args[0] 4074 mem := v.Args[1] 4075 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4076 break 4077 } 4078 v.reset(OpMIPS64MOVFload) 4079 v.AuxInt = off1 + off2 4080 v.Aux = mergeSym(sym1, sym2) 4081 v.AddArg(ptr) 4082 v.AddArg(mem) 4083 return true 4084 } 4085 return false 4086 } 4087 func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool { 4088 // match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4089 // cond: is32Bit(off1+off2) 4090 // result: (MOVFstore [off1+off2] {sym} ptr val mem) 4091 for { 4092 off1 := v.AuxInt 4093 sym := v.Aux 4094 _ = v.Args[2] 4095 v_0 := v.Args[0] 4096 if v_0.Op != OpMIPS64ADDVconst { 4097 break 4098 } 4099 off2 := v_0.AuxInt 4100 ptr := v_0.Args[0] 4101 val := v.Args[1] 4102 mem := v.Args[2] 4103 if 
!(is32Bit(off1 + off2)) { 4104 break 4105 } 4106 v.reset(OpMIPS64MOVFstore) 4107 v.AuxInt = off1 + off2 4108 v.Aux = sym 4109 v.AddArg(ptr) 4110 v.AddArg(val) 4111 v.AddArg(mem) 4112 return true 4113 } 4114 // match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4115 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4116 // result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4117 for { 4118 off1 := v.AuxInt 4119 sym1 := v.Aux 4120 _ = v.Args[2] 4121 v_0 := v.Args[0] 4122 if v_0.Op != OpMIPS64MOVVaddr { 4123 break 4124 } 4125 off2 := v_0.AuxInt 4126 sym2 := v_0.Aux 4127 ptr := v_0.Args[0] 4128 val := v.Args[1] 4129 mem := v.Args[2] 4130 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4131 break 4132 } 4133 v.reset(OpMIPS64MOVFstore) 4134 v.AuxInt = off1 + off2 4135 v.Aux = mergeSym(sym1, sym2) 4136 v.AddArg(ptr) 4137 v.AddArg(val) 4138 v.AddArg(mem) 4139 return true 4140 } 4141 return false 4142 } 4143 func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool { 4144 // match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem) 4145 // cond: is32Bit(off1+off2) 4146 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4147 for { 4148 off1 := v.AuxInt 4149 sym := v.Aux 4150 _ = v.Args[1] 4151 v_0 := v.Args[0] 4152 if v_0.Op != OpMIPS64ADDVconst { 4153 break 4154 } 4155 off2 := v_0.AuxInt 4156 ptr := v_0.Args[0] 4157 mem := v.Args[1] 4158 if !(is32Bit(off1 + off2)) { 4159 break 4160 } 4161 v.reset(OpMIPS64MOVHUload) 4162 v.AuxInt = off1 + off2 4163 v.Aux = sym 4164 v.AddArg(ptr) 4165 v.AddArg(mem) 4166 return true 4167 } 4168 // match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4169 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4170 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4171 for { 4172 off1 := v.AuxInt 4173 sym1 := v.Aux 4174 _ = v.Args[1] 4175 v_0 := v.Args[0] 4176 if v_0.Op != OpMIPS64MOVVaddr { 4177 break 4178 } 4179 off2 := v_0.AuxInt 4180 sym2 := v_0.Aux 4181 ptr := v_0.Args[0] 
4182 mem := v.Args[1] 4183 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4184 break 4185 } 4186 v.reset(OpMIPS64MOVHUload) 4187 v.AuxInt = off1 + off2 4188 v.Aux = mergeSym(sym1, sym2) 4189 v.AddArg(ptr) 4190 v.AddArg(mem) 4191 return true 4192 } 4193 return false 4194 } 4195 func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool { 4196 // match: (MOVHUreg x:(MOVBUload _ _)) 4197 // cond: 4198 // result: (MOVVreg x) 4199 for { 4200 x := v.Args[0] 4201 if x.Op != OpMIPS64MOVBUload { 4202 break 4203 } 4204 _ = x.Args[1] 4205 v.reset(OpMIPS64MOVVreg) 4206 v.AddArg(x) 4207 return true 4208 } 4209 // match: (MOVHUreg x:(MOVHUload _ _)) 4210 // cond: 4211 // result: (MOVVreg x) 4212 for { 4213 x := v.Args[0] 4214 if x.Op != OpMIPS64MOVHUload { 4215 break 4216 } 4217 _ = x.Args[1] 4218 v.reset(OpMIPS64MOVVreg) 4219 v.AddArg(x) 4220 return true 4221 } 4222 // match: (MOVHUreg x:(MOVBUreg _)) 4223 // cond: 4224 // result: (MOVVreg x) 4225 for { 4226 x := v.Args[0] 4227 if x.Op != OpMIPS64MOVBUreg { 4228 break 4229 } 4230 v.reset(OpMIPS64MOVVreg) 4231 v.AddArg(x) 4232 return true 4233 } 4234 // match: (MOVHUreg x:(MOVHUreg _)) 4235 // cond: 4236 // result: (MOVVreg x) 4237 for { 4238 x := v.Args[0] 4239 if x.Op != OpMIPS64MOVHUreg { 4240 break 4241 } 4242 v.reset(OpMIPS64MOVVreg) 4243 v.AddArg(x) 4244 return true 4245 } 4246 // match: (MOVHUreg (MOVVconst [c])) 4247 // cond: 4248 // result: (MOVVconst [int64(uint16(c))]) 4249 for { 4250 v_0 := v.Args[0] 4251 if v_0.Op != OpMIPS64MOVVconst { 4252 break 4253 } 4254 c := v_0.AuxInt 4255 v.reset(OpMIPS64MOVVconst) 4256 v.AuxInt = int64(uint16(c)) 4257 return true 4258 } 4259 return false 4260 } 4261 func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool { 4262 // match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem) 4263 // cond: is32Bit(off1+off2) 4264 // result: (MOVHload [off1+off2] {sym} ptr mem) 4265 for { 4266 off1 := v.AuxInt 4267 sym := v.Aux 4268 _ = v.Args[1] 4269 v_0 := v.Args[0] 4270 if v_0.Op != 
OpMIPS64ADDVconst { 4271 break 4272 } 4273 off2 := v_0.AuxInt 4274 ptr := v_0.Args[0] 4275 mem := v.Args[1] 4276 if !(is32Bit(off1 + off2)) { 4277 break 4278 } 4279 v.reset(OpMIPS64MOVHload) 4280 v.AuxInt = off1 + off2 4281 v.Aux = sym 4282 v.AddArg(ptr) 4283 v.AddArg(mem) 4284 return true 4285 } 4286 // match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4287 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4288 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4289 for { 4290 off1 := v.AuxInt 4291 sym1 := v.Aux 4292 _ = v.Args[1] 4293 v_0 := v.Args[0] 4294 if v_0.Op != OpMIPS64MOVVaddr { 4295 break 4296 } 4297 off2 := v_0.AuxInt 4298 sym2 := v_0.Aux 4299 ptr := v_0.Args[0] 4300 mem := v.Args[1] 4301 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4302 break 4303 } 4304 v.reset(OpMIPS64MOVHload) 4305 v.AuxInt = off1 + off2 4306 v.Aux = mergeSym(sym1, sym2) 4307 v.AddArg(ptr) 4308 v.AddArg(mem) 4309 return true 4310 } 4311 return false 4312 } 4313 func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool { 4314 // match: (MOVHreg x:(MOVBload _ _)) 4315 // cond: 4316 // result: (MOVVreg x) 4317 for { 4318 x := v.Args[0] 4319 if x.Op != OpMIPS64MOVBload { 4320 break 4321 } 4322 _ = x.Args[1] 4323 v.reset(OpMIPS64MOVVreg) 4324 v.AddArg(x) 4325 return true 4326 } 4327 // match: (MOVHreg x:(MOVBUload _ _)) 4328 // cond: 4329 // result: (MOVVreg x) 4330 for { 4331 x := v.Args[0] 4332 if x.Op != OpMIPS64MOVBUload { 4333 break 4334 } 4335 _ = x.Args[1] 4336 v.reset(OpMIPS64MOVVreg) 4337 v.AddArg(x) 4338 return true 4339 } 4340 // match: (MOVHreg x:(MOVHload _ _)) 4341 // cond: 4342 // result: (MOVVreg x) 4343 for { 4344 x := v.Args[0] 4345 if x.Op != OpMIPS64MOVHload { 4346 break 4347 } 4348 _ = x.Args[1] 4349 v.reset(OpMIPS64MOVVreg) 4350 v.AddArg(x) 4351 return true 4352 } 4353 // match: (MOVHreg x:(MOVBreg _)) 4354 // cond: 4355 // result: (MOVVreg x) 4356 for { 4357 x := v.Args[0] 4358 if x.Op != OpMIPS64MOVBreg { 4359 break 4360 } 
4361 v.reset(OpMIPS64MOVVreg) 4362 v.AddArg(x) 4363 return true 4364 } 4365 // match: (MOVHreg x:(MOVBUreg _)) 4366 // cond: 4367 // result: (MOVVreg x) 4368 for { 4369 x := v.Args[0] 4370 if x.Op != OpMIPS64MOVBUreg { 4371 break 4372 } 4373 v.reset(OpMIPS64MOVVreg) 4374 v.AddArg(x) 4375 return true 4376 } 4377 // match: (MOVHreg x:(MOVHreg _)) 4378 // cond: 4379 // result: (MOVVreg x) 4380 for { 4381 x := v.Args[0] 4382 if x.Op != OpMIPS64MOVHreg { 4383 break 4384 } 4385 v.reset(OpMIPS64MOVVreg) 4386 v.AddArg(x) 4387 return true 4388 } 4389 // match: (MOVHreg (MOVVconst [c])) 4390 // cond: 4391 // result: (MOVVconst [int64(int16(c))]) 4392 for { 4393 v_0 := v.Args[0] 4394 if v_0.Op != OpMIPS64MOVVconst { 4395 break 4396 } 4397 c := v_0.AuxInt 4398 v.reset(OpMIPS64MOVVconst) 4399 v.AuxInt = int64(int16(c)) 4400 return true 4401 } 4402 return false 4403 } 4404 func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool { 4405 // match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4406 // cond: is32Bit(off1+off2) 4407 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4408 for { 4409 off1 := v.AuxInt 4410 sym := v.Aux 4411 _ = v.Args[2] 4412 v_0 := v.Args[0] 4413 if v_0.Op != OpMIPS64ADDVconst { 4414 break 4415 } 4416 off2 := v_0.AuxInt 4417 ptr := v_0.Args[0] 4418 val := v.Args[1] 4419 mem := v.Args[2] 4420 if !(is32Bit(off1 + off2)) { 4421 break 4422 } 4423 v.reset(OpMIPS64MOVHstore) 4424 v.AuxInt = off1 + off2 4425 v.Aux = sym 4426 v.AddArg(ptr) 4427 v.AddArg(val) 4428 v.AddArg(mem) 4429 return true 4430 } 4431 // match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4432 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4433 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4434 for { 4435 off1 := v.AuxInt 4436 sym1 := v.Aux 4437 _ = v.Args[2] 4438 v_0 := v.Args[0] 4439 if v_0.Op != OpMIPS64MOVVaddr { 4440 break 4441 } 4442 off2 := v_0.AuxInt 4443 sym2 := v_0.Aux 4444 ptr := v_0.Args[0] 4445 val := v.Args[1] 4446 
mem := v.Args[2] 4447 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4448 break 4449 } 4450 v.reset(OpMIPS64MOVHstore) 4451 v.AuxInt = off1 + off2 4452 v.Aux = mergeSym(sym1, sym2) 4453 v.AddArg(ptr) 4454 v.AddArg(val) 4455 v.AddArg(mem) 4456 return true 4457 } 4458 // match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem) 4459 // cond: 4460 // result: (MOVHstorezero [off] {sym} ptr mem) 4461 for { 4462 off := v.AuxInt 4463 sym := v.Aux 4464 _ = v.Args[2] 4465 ptr := v.Args[0] 4466 v_1 := v.Args[1] 4467 if v_1.Op != OpMIPS64MOVVconst { 4468 break 4469 } 4470 if v_1.AuxInt != 0 { 4471 break 4472 } 4473 mem := v.Args[2] 4474 v.reset(OpMIPS64MOVHstorezero) 4475 v.AuxInt = off 4476 v.Aux = sym 4477 v.AddArg(ptr) 4478 v.AddArg(mem) 4479 return true 4480 } 4481 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4482 // cond: 4483 // result: (MOVHstore [off] {sym} ptr x mem) 4484 for { 4485 off := v.AuxInt 4486 sym := v.Aux 4487 _ = v.Args[2] 4488 ptr := v.Args[0] 4489 v_1 := v.Args[1] 4490 if v_1.Op != OpMIPS64MOVHreg { 4491 break 4492 } 4493 x := v_1.Args[0] 4494 mem := v.Args[2] 4495 v.reset(OpMIPS64MOVHstore) 4496 v.AuxInt = off 4497 v.Aux = sym 4498 v.AddArg(ptr) 4499 v.AddArg(x) 4500 v.AddArg(mem) 4501 return true 4502 } 4503 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4504 // cond: 4505 // result: (MOVHstore [off] {sym} ptr x mem) 4506 for { 4507 off := v.AuxInt 4508 sym := v.Aux 4509 _ = v.Args[2] 4510 ptr := v.Args[0] 4511 v_1 := v.Args[1] 4512 if v_1.Op != OpMIPS64MOVHUreg { 4513 break 4514 } 4515 x := v_1.Args[0] 4516 mem := v.Args[2] 4517 v.reset(OpMIPS64MOVHstore) 4518 v.AuxInt = off 4519 v.Aux = sym 4520 v.AddArg(ptr) 4521 v.AddArg(x) 4522 v.AddArg(mem) 4523 return true 4524 } 4525 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4526 // cond: 4527 // result: (MOVHstore [off] {sym} ptr x mem) 4528 for { 4529 off := v.AuxInt 4530 sym := v.Aux 4531 _ = v.Args[2] 4532 ptr := v.Args[0] 4533 v_1 := v.Args[1] 4534 if v_1.Op != 
OpMIPS64MOVWreg { 4535 break 4536 } 4537 x := v_1.Args[0] 4538 mem := v.Args[2] 4539 v.reset(OpMIPS64MOVHstore) 4540 v.AuxInt = off 4541 v.Aux = sym 4542 v.AddArg(ptr) 4543 v.AddArg(x) 4544 v.AddArg(mem) 4545 return true 4546 } 4547 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4548 // cond: 4549 // result: (MOVHstore [off] {sym} ptr x mem) 4550 for { 4551 off := v.AuxInt 4552 sym := v.Aux 4553 _ = v.Args[2] 4554 ptr := v.Args[0] 4555 v_1 := v.Args[1] 4556 if v_1.Op != OpMIPS64MOVWUreg { 4557 break 4558 } 4559 x := v_1.Args[0] 4560 mem := v.Args[2] 4561 v.reset(OpMIPS64MOVHstore) 4562 v.AuxInt = off 4563 v.Aux = sym 4564 v.AddArg(ptr) 4565 v.AddArg(x) 4566 v.AddArg(mem) 4567 return true 4568 } 4569 return false 4570 } 4571 func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool { 4572 // match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4573 // cond: is32Bit(off1+off2) 4574 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4575 for { 4576 off1 := v.AuxInt 4577 sym := v.Aux 4578 _ = v.Args[1] 4579 v_0 := v.Args[0] 4580 if v_0.Op != OpMIPS64ADDVconst { 4581 break 4582 } 4583 off2 := v_0.AuxInt 4584 ptr := v_0.Args[0] 4585 mem := v.Args[1] 4586 if !(is32Bit(off1 + off2)) { 4587 break 4588 } 4589 v.reset(OpMIPS64MOVHstorezero) 4590 v.AuxInt = off1 + off2 4591 v.Aux = sym 4592 v.AddArg(ptr) 4593 v.AddArg(mem) 4594 return true 4595 } 4596 // match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4597 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4598 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4599 for { 4600 off1 := v.AuxInt 4601 sym1 := v.Aux 4602 _ = v.Args[1] 4603 v_0 := v.Args[0] 4604 if v_0.Op != OpMIPS64MOVVaddr { 4605 break 4606 } 4607 off2 := v_0.AuxInt 4608 sym2 := v_0.Aux 4609 ptr := v_0.Args[0] 4610 mem := v.Args[1] 4611 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4612 break 4613 } 4614 v.reset(OpMIPS64MOVHstorezero) 4615 v.AuxInt = off1 + off2 4616 v.Aux = 
mergeSym(sym1, sym2) 4617 v.AddArg(ptr) 4618 v.AddArg(mem) 4619 return true 4620 } 4621 return false 4622 } 4623 func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool { 4624 // match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem) 4625 // cond: is32Bit(off1+off2) 4626 // result: (MOVVload [off1+off2] {sym} ptr mem) 4627 for { 4628 off1 := v.AuxInt 4629 sym := v.Aux 4630 _ = v.Args[1] 4631 v_0 := v.Args[0] 4632 if v_0.Op != OpMIPS64ADDVconst { 4633 break 4634 } 4635 off2 := v_0.AuxInt 4636 ptr := v_0.Args[0] 4637 mem := v.Args[1] 4638 if !(is32Bit(off1 + off2)) { 4639 break 4640 } 4641 v.reset(OpMIPS64MOVVload) 4642 v.AuxInt = off1 + off2 4643 v.Aux = sym 4644 v.AddArg(ptr) 4645 v.AddArg(mem) 4646 return true 4647 } 4648 // match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4649 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4650 // result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4651 for { 4652 off1 := v.AuxInt 4653 sym1 := v.Aux 4654 _ = v.Args[1] 4655 v_0 := v.Args[0] 4656 if v_0.Op != OpMIPS64MOVVaddr { 4657 break 4658 } 4659 off2 := v_0.AuxInt 4660 sym2 := v_0.Aux 4661 ptr := v_0.Args[0] 4662 mem := v.Args[1] 4663 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4664 break 4665 } 4666 v.reset(OpMIPS64MOVVload) 4667 v.AuxInt = off1 + off2 4668 v.Aux = mergeSym(sym1, sym2) 4669 v.AddArg(ptr) 4670 v.AddArg(mem) 4671 return true 4672 } 4673 return false 4674 } 4675 func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool { 4676 // match: (MOVVreg x) 4677 // cond: x.Uses == 1 4678 // result: (MOVVnop x) 4679 for { 4680 x := v.Args[0] 4681 if !(x.Uses == 1) { 4682 break 4683 } 4684 v.reset(OpMIPS64MOVVnop) 4685 v.AddArg(x) 4686 return true 4687 } 4688 // match: (MOVVreg (MOVVconst [c])) 4689 // cond: 4690 // result: (MOVVconst [c]) 4691 for { 4692 v_0 := v.Args[0] 4693 if v_0.Op != OpMIPS64MOVVconst { 4694 break 4695 } 4696 c := v_0.AuxInt 4697 v.reset(OpMIPS64MOVVconst) 4698 v.AuxInt = c 4699 return true 4700 } 4701 
return false 4702 } 4703 func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool { 4704 // match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4705 // cond: is32Bit(off1+off2) 4706 // result: (MOVVstore [off1+off2] {sym} ptr val mem) 4707 for { 4708 off1 := v.AuxInt 4709 sym := v.Aux 4710 _ = v.Args[2] 4711 v_0 := v.Args[0] 4712 if v_0.Op != OpMIPS64ADDVconst { 4713 break 4714 } 4715 off2 := v_0.AuxInt 4716 ptr := v_0.Args[0] 4717 val := v.Args[1] 4718 mem := v.Args[2] 4719 if !(is32Bit(off1 + off2)) { 4720 break 4721 } 4722 v.reset(OpMIPS64MOVVstore) 4723 v.AuxInt = off1 + off2 4724 v.Aux = sym 4725 v.AddArg(ptr) 4726 v.AddArg(val) 4727 v.AddArg(mem) 4728 return true 4729 } 4730 // match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4731 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4732 // result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4733 for { 4734 off1 := v.AuxInt 4735 sym1 := v.Aux 4736 _ = v.Args[2] 4737 v_0 := v.Args[0] 4738 if v_0.Op != OpMIPS64MOVVaddr { 4739 break 4740 } 4741 off2 := v_0.AuxInt 4742 sym2 := v_0.Aux 4743 ptr := v_0.Args[0] 4744 val := v.Args[1] 4745 mem := v.Args[2] 4746 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4747 break 4748 } 4749 v.reset(OpMIPS64MOVVstore) 4750 v.AuxInt = off1 + off2 4751 v.Aux = mergeSym(sym1, sym2) 4752 v.AddArg(ptr) 4753 v.AddArg(val) 4754 v.AddArg(mem) 4755 return true 4756 } 4757 // match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem) 4758 // cond: 4759 // result: (MOVVstorezero [off] {sym} ptr mem) 4760 for { 4761 off := v.AuxInt 4762 sym := v.Aux 4763 _ = v.Args[2] 4764 ptr := v.Args[0] 4765 v_1 := v.Args[1] 4766 if v_1.Op != OpMIPS64MOVVconst { 4767 break 4768 } 4769 if v_1.AuxInt != 0 { 4770 break 4771 } 4772 mem := v.Args[2] 4773 v.reset(OpMIPS64MOVVstorezero) 4774 v.AuxInt = off 4775 v.Aux = sym 4776 v.AddArg(ptr) 4777 v.AddArg(mem) 4778 return true 4779 } 4780 return false 4781 } 4782 func 
rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool { 4783 // match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4784 // cond: is32Bit(off1+off2) 4785 // result: (MOVVstorezero [off1+off2] {sym} ptr mem) 4786 for { 4787 off1 := v.AuxInt 4788 sym := v.Aux 4789 _ = v.Args[1] 4790 v_0 := v.Args[0] 4791 if v_0.Op != OpMIPS64ADDVconst { 4792 break 4793 } 4794 off2 := v_0.AuxInt 4795 ptr := v_0.Args[0] 4796 mem := v.Args[1] 4797 if !(is32Bit(off1 + off2)) { 4798 break 4799 } 4800 v.reset(OpMIPS64MOVVstorezero) 4801 v.AuxInt = off1 + off2 4802 v.Aux = sym 4803 v.AddArg(ptr) 4804 v.AddArg(mem) 4805 return true 4806 } 4807 // match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4808 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4809 // result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4810 for { 4811 off1 := v.AuxInt 4812 sym1 := v.Aux 4813 _ = v.Args[1] 4814 v_0 := v.Args[0] 4815 if v_0.Op != OpMIPS64MOVVaddr { 4816 break 4817 } 4818 off2 := v_0.AuxInt 4819 sym2 := v_0.Aux 4820 ptr := v_0.Args[0] 4821 mem := v.Args[1] 4822 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4823 break 4824 } 4825 v.reset(OpMIPS64MOVVstorezero) 4826 v.AuxInt = off1 + off2 4827 v.Aux = mergeSym(sym1, sym2) 4828 v.AddArg(ptr) 4829 v.AddArg(mem) 4830 return true 4831 } 4832 return false 4833 } 4834 func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool { 4835 // match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem) 4836 // cond: is32Bit(off1+off2) 4837 // result: (MOVWUload [off1+off2] {sym} ptr mem) 4838 for { 4839 off1 := v.AuxInt 4840 sym := v.Aux 4841 _ = v.Args[1] 4842 v_0 := v.Args[0] 4843 if v_0.Op != OpMIPS64ADDVconst { 4844 break 4845 } 4846 off2 := v_0.AuxInt 4847 ptr := v_0.Args[0] 4848 mem := v.Args[1] 4849 if !(is32Bit(off1 + off2)) { 4850 break 4851 } 4852 v.reset(OpMIPS64MOVWUload) 4853 v.AuxInt = off1 + off2 4854 v.Aux = sym 4855 v.AddArg(ptr) 4856 v.AddArg(mem) 4857 return true 4858 } 4859 // match: 
(MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4860 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4861 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4862 for { 4863 off1 := v.AuxInt 4864 sym1 := v.Aux 4865 _ = v.Args[1] 4866 v_0 := v.Args[0] 4867 if v_0.Op != OpMIPS64MOVVaddr { 4868 break 4869 } 4870 off2 := v_0.AuxInt 4871 sym2 := v_0.Aux 4872 ptr := v_0.Args[0] 4873 mem := v.Args[1] 4874 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4875 break 4876 } 4877 v.reset(OpMIPS64MOVWUload) 4878 v.AuxInt = off1 + off2 4879 v.Aux = mergeSym(sym1, sym2) 4880 v.AddArg(ptr) 4881 v.AddArg(mem) 4882 return true 4883 } 4884 return false 4885 } 4886 func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool { 4887 // match: (MOVWUreg x:(MOVBUload _ _)) 4888 // cond: 4889 // result: (MOVVreg x) 4890 for { 4891 x := v.Args[0] 4892 if x.Op != OpMIPS64MOVBUload { 4893 break 4894 } 4895 _ = x.Args[1] 4896 v.reset(OpMIPS64MOVVreg) 4897 v.AddArg(x) 4898 return true 4899 } 4900 // match: (MOVWUreg x:(MOVHUload _ _)) 4901 // cond: 4902 // result: (MOVVreg x) 4903 for { 4904 x := v.Args[0] 4905 if x.Op != OpMIPS64MOVHUload { 4906 break 4907 } 4908 _ = x.Args[1] 4909 v.reset(OpMIPS64MOVVreg) 4910 v.AddArg(x) 4911 return true 4912 } 4913 // match: (MOVWUreg x:(MOVWUload _ _)) 4914 // cond: 4915 // result: (MOVVreg x) 4916 for { 4917 x := v.Args[0] 4918 if x.Op != OpMIPS64MOVWUload { 4919 break 4920 } 4921 _ = x.Args[1] 4922 v.reset(OpMIPS64MOVVreg) 4923 v.AddArg(x) 4924 return true 4925 } 4926 // match: (MOVWUreg x:(MOVBUreg _)) 4927 // cond: 4928 // result: (MOVVreg x) 4929 for { 4930 x := v.Args[0] 4931 if x.Op != OpMIPS64MOVBUreg { 4932 break 4933 } 4934 v.reset(OpMIPS64MOVVreg) 4935 v.AddArg(x) 4936 return true 4937 } 4938 // match: (MOVWUreg x:(MOVHUreg _)) 4939 // cond: 4940 // result: (MOVVreg x) 4941 for { 4942 x := v.Args[0] 4943 if x.Op != OpMIPS64MOVHUreg { 4944 break 4945 } 4946 v.reset(OpMIPS64MOVVreg) 4947 v.AddArg(x) 4948 return 
true 4949 } 4950 // match: (MOVWUreg x:(MOVWUreg _)) 4951 // cond: 4952 // result: (MOVVreg x) 4953 for { 4954 x := v.Args[0] 4955 if x.Op != OpMIPS64MOVWUreg { 4956 break 4957 } 4958 v.reset(OpMIPS64MOVVreg) 4959 v.AddArg(x) 4960 return true 4961 } 4962 // match: (MOVWUreg (MOVVconst [c])) 4963 // cond: 4964 // result: (MOVVconst [int64(uint32(c))]) 4965 for { 4966 v_0 := v.Args[0] 4967 if v_0.Op != OpMIPS64MOVVconst { 4968 break 4969 } 4970 c := v_0.AuxInt 4971 v.reset(OpMIPS64MOVVconst) 4972 v.AuxInt = int64(uint32(c)) 4973 return true 4974 } 4975 return false 4976 } 4977 func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool { 4978 // match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem) 4979 // cond: is32Bit(off1+off2) 4980 // result: (MOVWload [off1+off2] {sym} ptr mem) 4981 for { 4982 off1 := v.AuxInt 4983 sym := v.Aux 4984 _ = v.Args[1] 4985 v_0 := v.Args[0] 4986 if v_0.Op != OpMIPS64ADDVconst { 4987 break 4988 } 4989 off2 := v_0.AuxInt 4990 ptr := v_0.Args[0] 4991 mem := v.Args[1] 4992 if !(is32Bit(off1 + off2)) { 4993 break 4994 } 4995 v.reset(OpMIPS64MOVWload) 4996 v.AuxInt = off1 + off2 4997 v.Aux = sym 4998 v.AddArg(ptr) 4999 v.AddArg(mem) 5000 return true 5001 } 5002 // match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5003 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5004 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5005 for { 5006 off1 := v.AuxInt 5007 sym1 := v.Aux 5008 _ = v.Args[1] 5009 v_0 := v.Args[0] 5010 if v_0.Op != OpMIPS64MOVVaddr { 5011 break 5012 } 5013 off2 := v_0.AuxInt 5014 sym2 := v_0.Aux 5015 ptr := v_0.Args[0] 5016 mem := v.Args[1] 5017 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5018 break 5019 } 5020 v.reset(OpMIPS64MOVWload) 5021 v.AuxInt = off1 + off2 5022 v.Aux = mergeSym(sym1, sym2) 5023 v.AddArg(ptr) 5024 v.AddArg(mem) 5025 return true 5026 } 5027 return false 5028 } 5029 func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool { 5030 // match: (MOVWreg x:(MOVBload 
_ _)) 5031 // cond: 5032 // result: (MOVVreg x) 5033 for { 5034 x := v.Args[0] 5035 if x.Op != OpMIPS64MOVBload { 5036 break 5037 } 5038 _ = x.Args[1] 5039 v.reset(OpMIPS64MOVVreg) 5040 v.AddArg(x) 5041 return true 5042 } 5043 // match: (MOVWreg x:(MOVBUload _ _)) 5044 // cond: 5045 // result: (MOVVreg x) 5046 for { 5047 x := v.Args[0] 5048 if x.Op != OpMIPS64MOVBUload { 5049 break 5050 } 5051 _ = x.Args[1] 5052 v.reset(OpMIPS64MOVVreg) 5053 v.AddArg(x) 5054 return true 5055 } 5056 // match: (MOVWreg x:(MOVHload _ _)) 5057 // cond: 5058 // result: (MOVVreg x) 5059 for { 5060 x := v.Args[0] 5061 if x.Op != OpMIPS64MOVHload { 5062 break 5063 } 5064 _ = x.Args[1] 5065 v.reset(OpMIPS64MOVVreg) 5066 v.AddArg(x) 5067 return true 5068 } 5069 // match: (MOVWreg x:(MOVHUload _ _)) 5070 // cond: 5071 // result: (MOVVreg x) 5072 for { 5073 x := v.Args[0] 5074 if x.Op != OpMIPS64MOVHUload { 5075 break 5076 } 5077 _ = x.Args[1] 5078 v.reset(OpMIPS64MOVVreg) 5079 v.AddArg(x) 5080 return true 5081 } 5082 // match: (MOVWreg x:(MOVWload _ _)) 5083 // cond: 5084 // result: (MOVVreg x) 5085 for { 5086 x := v.Args[0] 5087 if x.Op != OpMIPS64MOVWload { 5088 break 5089 } 5090 _ = x.Args[1] 5091 v.reset(OpMIPS64MOVVreg) 5092 v.AddArg(x) 5093 return true 5094 } 5095 // match: (MOVWreg x:(MOVBreg _)) 5096 // cond: 5097 // result: (MOVVreg x) 5098 for { 5099 x := v.Args[0] 5100 if x.Op != OpMIPS64MOVBreg { 5101 break 5102 } 5103 v.reset(OpMIPS64MOVVreg) 5104 v.AddArg(x) 5105 return true 5106 } 5107 // match: (MOVWreg x:(MOVBUreg _)) 5108 // cond: 5109 // result: (MOVVreg x) 5110 for { 5111 x := v.Args[0] 5112 if x.Op != OpMIPS64MOVBUreg { 5113 break 5114 } 5115 v.reset(OpMIPS64MOVVreg) 5116 v.AddArg(x) 5117 return true 5118 } 5119 // match: (MOVWreg x:(MOVHreg _)) 5120 // cond: 5121 // result: (MOVVreg x) 5122 for { 5123 x := v.Args[0] 5124 if x.Op != OpMIPS64MOVHreg { 5125 break 5126 } 5127 v.reset(OpMIPS64MOVVreg) 5128 v.AddArg(x) 5129 return true 5130 } 5131 // match: (MOVWreg x:(MOVHreg 
_)) 5132 // cond: 5133 // result: (MOVVreg x) 5134 for { 5135 x := v.Args[0] 5136 if x.Op != OpMIPS64MOVHreg { 5137 break 5138 } 5139 v.reset(OpMIPS64MOVVreg) 5140 v.AddArg(x) 5141 return true 5142 } 5143 // match: (MOVWreg x:(MOVWreg _)) 5144 // cond: 5145 // result: (MOVVreg x) 5146 for { 5147 x := v.Args[0] 5148 if x.Op != OpMIPS64MOVWreg { 5149 break 5150 } 5151 v.reset(OpMIPS64MOVVreg) 5152 v.AddArg(x) 5153 return true 5154 } 5155 return false 5156 } 5157 func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool { 5158 // match: (MOVWreg (MOVVconst [c])) 5159 // cond: 5160 // result: (MOVVconst [int64(int32(c))]) 5161 for { 5162 v_0 := v.Args[0] 5163 if v_0.Op != OpMIPS64MOVVconst { 5164 break 5165 } 5166 c := v_0.AuxInt 5167 v.reset(OpMIPS64MOVVconst) 5168 v.AuxInt = int64(int32(c)) 5169 return true 5170 } 5171 return false 5172 } 5173 func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool { 5174 // match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 5175 // cond: is32Bit(off1+off2) 5176 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 5177 for { 5178 off1 := v.AuxInt 5179 sym := v.Aux 5180 _ = v.Args[2] 5181 v_0 := v.Args[0] 5182 if v_0.Op != OpMIPS64ADDVconst { 5183 break 5184 } 5185 off2 := v_0.AuxInt 5186 ptr := v_0.Args[0] 5187 val := v.Args[1] 5188 mem := v.Args[2] 5189 if !(is32Bit(off1 + off2)) { 5190 break 5191 } 5192 v.reset(OpMIPS64MOVWstore) 5193 v.AuxInt = off1 + off2 5194 v.Aux = sym 5195 v.AddArg(ptr) 5196 v.AddArg(val) 5197 v.AddArg(mem) 5198 return true 5199 } 5200 // match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 5201 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5202 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5203 for { 5204 off1 := v.AuxInt 5205 sym1 := v.Aux 5206 _ = v.Args[2] 5207 v_0 := v.Args[0] 5208 if v_0.Op != OpMIPS64MOVVaddr { 5209 break 5210 } 5211 off2 := v_0.AuxInt 5212 sym2 := v_0.Aux 5213 ptr := v_0.Args[0] 5214 val := v.Args[1] 5215 mem := v.Args[2] 
5216 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5217 break 5218 } 5219 v.reset(OpMIPS64MOVWstore) 5220 v.AuxInt = off1 + off2 5221 v.Aux = mergeSym(sym1, sym2) 5222 v.AddArg(ptr) 5223 v.AddArg(val) 5224 v.AddArg(mem) 5225 return true 5226 } 5227 // match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem) 5228 // cond: 5229 // result: (MOVWstorezero [off] {sym} ptr mem) 5230 for { 5231 off := v.AuxInt 5232 sym := v.Aux 5233 _ = v.Args[2] 5234 ptr := v.Args[0] 5235 v_1 := v.Args[1] 5236 if v_1.Op != OpMIPS64MOVVconst { 5237 break 5238 } 5239 if v_1.AuxInt != 0 { 5240 break 5241 } 5242 mem := v.Args[2] 5243 v.reset(OpMIPS64MOVWstorezero) 5244 v.AuxInt = off 5245 v.Aux = sym 5246 v.AddArg(ptr) 5247 v.AddArg(mem) 5248 return true 5249 } 5250 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5251 // cond: 5252 // result: (MOVWstore [off] {sym} ptr x mem) 5253 for { 5254 off := v.AuxInt 5255 sym := v.Aux 5256 _ = v.Args[2] 5257 ptr := v.Args[0] 5258 v_1 := v.Args[1] 5259 if v_1.Op != OpMIPS64MOVWreg { 5260 break 5261 } 5262 x := v_1.Args[0] 5263 mem := v.Args[2] 5264 v.reset(OpMIPS64MOVWstore) 5265 v.AuxInt = off 5266 v.Aux = sym 5267 v.AddArg(ptr) 5268 v.AddArg(x) 5269 v.AddArg(mem) 5270 return true 5271 } 5272 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 5273 // cond: 5274 // result: (MOVWstore [off] {sym} ptr x mem) 5275 for { 5276 off := v.AuxInt 5277 sym := v.Aux 5278 _ = v.Args[2] 5279 ptr := v.Args[0] 5280 v_1 := v.Args[1] 5281 if v_1.Op != OpMIPS64MOVWUreg { 5282 break 5283 } 5284 x := v_1.Args[0] 5285 mem := v.Args[2] 5286 v.reset(OpMIPS64MOVWstore) 5287 v.AuxInt = off 5288 v.Aux = sym 5289 v.AddArg(ptr) 5290 v.AddArg(x) 5291 v.AddArg(mem) 5292 return true 5293 } 5294 return false 5295 } 5296 func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool { 5297 // match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 5298 // cond: is32Bit(off1+off2) 5299 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5300 for { 5301 off1 := 
v.AuxInt 5302 sym := v.Aux 5303 _ = v.Args[1] 5304 v_0 := v.Args[0] 5305 if v_0.Op != OpMIPS64ADDVconst { 5306 break 5307 } 5308 off2 := v_0.AuxInt 5309 ptr := v_0.Args[0] 5310 mem := v.Args[1] 5311 if !(is32Bit(off1 + off2)) { 5312 break 5313 } 5314 v.reset(OpMIPS64MOVWstorezero) 5315 v.AuxInt = off1 + off2 5316 v.Aux = sym 5317 v.AddArg(ptr) 5318 v.AddArg(mem) 5319 return true 5320 } 5321 // match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5322 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5323 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5324 for { 5325 off1 := v.AuxInt 5326 sym1 := v.Aux 5327 _ = v.Args[1] 5328 v_0 := v.Args[0] 5329 if v_0.Op != OpMIPS64MOVVaddr { 5330 break 5331 } 5332 off2 := v_0.AuxInt 5333 sym2 := v_0.Aux 5334 ptr := v_0.Args[0] 5335 mem := v.Args[1] 5336 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5337 break 5338 } 5339 v.reset(OpMIPS64MOVWstorezero) 5340 v.AuxInt = off1 + off2 5341 v.Aux = mergeSym(sym1, sym2) 5342 v.AddArg(ptr) 5343 v.AddArg(mem) 5344 return true 5345 } 5346 return false 5347 } 5348 func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool { 5349 // match: (NEGV (MOVVconst [c])) 5350 // cond: 5351 // result: (MOVVconst [-c]) 5352 for { 5353 v_0 := v.Args[0] 5354 if v_0.Op != OpMIPS64MOVVconst { 5355 break 5356 } 5357 c := v_0.AuxInt 5358 v.reset(OpMIPS64MOVVconst) 5359 v.AuxInt = -c 5360 return true 5361 } 5362 return false 5363 } 5364 func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool { 5365 // match: (NOR x (MOVVconst [c])) 5366 // cond: is32Bit(c) 5367 // result: (NORconst [c] x) 5368 for { 5369 _ = v.Args[1] 5370 x := v.Args[0] 5371 v_1 := v.Args[1] 5372 if v_1.Op != OpMIPS64MOVVconst { 5373 break 5374 } 5375 c := v_1.AuxInt 5376 if !(is32Bit(c)) { 5377 break 5378 } 5379 v.reset(OpMIPS64NORconst) 5380 v.AuxInt = c 5381 v.AddArg(x) 5382 return true 5383 } 5384 // match: (NOR (MOVVconst [c]) x) 5385 // cond: is32Bit(c) 5386 // result: (NORconst [c] x) 5387 
for { 5388 _ = v.Args[1] 5389 v_0 := v.Args[0] 5390 if v_0.Op != OpMIPS64MOVVconst { 5391 break 5392 } 5393 c := v_0.AuxInt 5394 x := v.Args[1] 5395 if !(is32Bit(c)) { 5396 break 5397 } 5398 v.reset(OpMIPS64NORconst) 5399 v.AuxInt = c 5400 v.AddArg(x) 5401 return true 5402 } 5403 return false 5404 } 5405 func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool { 5406 // match: (NORconst [c] (MOVVconst [d])) 5407 // cond: 5408 // result: (MOVVconst [^(c|d)]) 5409 for { 5410 c := v.AuxInt 5411 v_0 := v.Args[0] 5412 if v_0.Op != OpMIPS64MOVVconst { 5413 break 5414 } 5415 d := v_0.AuxInt 5416 v.reset(OpMIPS64MOVVconst) 5417 v.AuxInt = ^(c | d) 5418 return true 5419 } 5420 return false 5421 } 5422 func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool { 5423 // match: (OR x (MOVVconst [c])) 5424 // cond: is32Bit(c) 5425 // result: (ORconst [c] x) 5426 for { 5427 _ = v.Args[1] 5428 x := v.Args[0] 5429 v_1 := v.Args[1] 5430 if v_1.Op != OpMIPS64MOVVconst { 5431 break 5432 } 5433 c := v_1.AuxInt 5434 if !(is32Bit(c)) { 5435 break 5436 } 5437 v.reset(OpMIPS64ORconst) 5438 v.AuxInt = c 5439 v.AddArg(x) 5440 return true 5441 } 5442 // match: (OR (MOVVconst [c]) x) 5443 // cond: is32Bit(c) 5444 // result: (ORconst [c] x) 5445 for { 5446 _ = v.Args[1] 5447 v_0 := v.Args[0] 5448 if v_0.Op != OpMIPS64MOVVconst { 5449 break 5450 } 5451 c := v_0.AuxInt 5452 x := v.Args[1] 5453 if !(is32Bit(c)) { 5454 break 5455 } 5456 v.reset(OpMIPS64ORconst) 5457 v.AuxInt = c 5458 v.AddArg(x) 5459 return true 5460 } 5461 // match: (OR x x) 5462 // cond: 5463 // result: x 5464 for { 5465 _ = v.Args[1] 5466 x := v.Args[0] 5467 if x != v.Args[1] { 5468 break 5469 } 5470 v.reset(OpCopy) 5471 v.Type = x.Type 5472 v.AddArg(x) 5473 return true 5474 } 5475 return false 5476 } 5477 func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool { 5478 // match: (ORconst [0] x) 5479 // cond: 5480 // result: x 5481 for { 5482 if v.AuxInt != 0 { 5483 break 5484 } 5485 x := v.Args[0] 5486 v.reset(OpCopy) 5487 v.Type = 
x.Type 5488 v.AddArg(x) 5489 return true 5490 } 5491 // match: (ORconst [-1] _) 5492 // cond: 5493 // result: (MOVVconst [-1]) 5494 for { 5495 if v.AuxInt != -1 { 5496 break 5497 } 5498 v.reset(OpMIPS64MOVVconst) 5499 v.AuxInt = -1 5500 return true 5501 } 5502 // match: (ORconst [c] (MOVVconst [d])) 5503 // cond: 5504 // result: (MOVVconst [c|d]) 5505 for { 5506 c := v.AuxInt 5507 v_0 := v.Args[0] 5508 if v_0.Op != OpMIPS64MOVVconst { 5509 break 5510 } 5511 d := v_0.AuxInt 5512 v.reset(OpMIPS64MOVVconst) 5513 v.AuxInt = c | d 5514 return true 5515 } 5516 // match: (ORconst [c] (ORconst [d] x)) 5517 // cond: is32Bit(c|d) 5518 // result: (ORconst [c|d] x) 5519 for { 5520 c := v.AuxInt 5521 v_0 := v.Args[0] 5522 if v_0.Op != OpMIPS64ORconst { 5523 break 5524 } 5525 d := v_0.AuxInt 5526 x := v_0.Args[0] 5527 if !(is32Bit(c | d)) { 5528 break 5529 } 5530 v.reset(OpMIPS64ORconst) 5531 v.AuxInt = c | d 5532 v.AddArg(x) 5533 return true 5534 } 5535 return false 5536 } 5537 func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool { 5538 // match: (SGT (MOVVconst [c]) x) 5539 // cond: is32Bit(c) 5540 // result: (SGTconst [c] x) 5541 for { 5542 _ = v.Args[1] 5543 v_0 := v.Args[0] 5544 if v_0.Op != OpMIPS64MOVVconst { 5545 break 5546 } 5547 c := v_0.AuxInt 5548 x := v.Args[1] 5549 if !(is32Bit(c)) { 5550 break 5551 } 5552 v.reset(OpMIPS64SGTconst) 5553 v.AuxInt = c 5554 v.AddArg(x) 5555 return true 5556 } 5557 return false 5558 } 5559 func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool { 5560 // match: (SGTU (MOVVconst [c]) x) 5561 // cond: is32Bit(c) 5562 // result: (SGTUconst [c] x) 5563 for { 5564 _ = v.Args[1] 5565 v_0 := v.Args[0] 5566 if v_0.Op != OpMIPS64MOVVconst { 5567 break 5568 } 5569 c := v_0.AuxInt 5570 x := v.Args[1] 5571 if !(is32Bit(c)) { 5572 break 5573 } 5574 v.reset(OpMIPS64SGTUconst) 5575 v.AuxInt = c 5576 v.AddArg(x) 5577 return true 5578 } 5579 return false 5580 } 5581 func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool { 5582 // match: (SGTUconst 
[c] (MOVVconst [d])) 5583 // cond: uint64(c)>uint64(d) 5584 // result: (MOVVconst [1]) 5585 for { 5586 c := v.AuxInt 5587 v_0 := v.Args[0] 5588 if v_0.Op != OpMIPS64MOVVconst { 5589 break 5590 } 5591 d := v_0.AuxInt 5592 if !(uint64(c) > uint64(d)) { 5593 break 5594 } 5595 v.reset(OpMIPS64MOVVconst) 5596 v.AuxInt = 1 5597 return true 5598 } 5599 // match: (SGTUconst [c] (MOVVconst [d])) 5600 // cond: uint64(c)<=uint64(d) 5601 // result: (MOVVconst [0]) 5602 for { 5603 c := v.AuxInt 5604 v_0 := v.Args[0] 5605 if v_0.Op != OpMIPS64MOVVconst { 5606 break 5607 } 5608 d := v_0.AuxInt 5609 if !(uint64(c) <= uint64(d)) { 5610 break 5611 } 5612 v.reset(OpMIPS64MOVVconst) 5613 v.AuxInt = 0 5614 return true 5615 } 5616 // match: (SGTUconst [c] (MOVBUreg _)) 5617 // cond: 0xff < uint64(c) 5618 // result: (MOVVconst [1]) 5619 for { 5620 c := v.AuxInt 5621 v_0 := v.Args[0] 5622 if v_0.Op != OpMIPS64MOVBUreg { 5623 break 5624 } 5625 if !(0xff < uint64(c)) { 5626 break 5627 } 5628 v.reset(OpMIPS64MOVVconst) 5629 v.AuxInt = 1 5630 return true 5631 } 5632 // match: (SGTUconst [c] (MOVHUreg _)) 5633 // cond: 0xffff < uint64(c) 5634 // result: (MOVVconst [1]) 5635 for { 5636 c := v.AuxInt 5637 v_0 := v.Args[0] 5638 if v_0.Op != OpMIPS64MOVHUreg { 5639 break 5640 } 5641 if !(0xffff < uint64(c)) { 5642 break 5643 } 5644 v.reset(OpMIPS64MOVVconst) 5645 v.AuxInt = 1 5646 return true 5647 } 5648 // match: (SGTUconst [c] (ANDconst [m] _)) 5649 // cond: uint64(m) < uint64(c) 5650 // result: (MOVVconst [1]) 5651 for { 5652 c := v.AuxInt 5653 v_0 := v.Args[0] 5654 if v_0.Op != OpMIPS64ANDconst { 5655 break 5656 } 5657 m := v_0.AuxInt 5658 if !(uint64(m) < uint64(c)) { 5659 break 5660 } 5661 v.reset(OpMIPS64MOVVconst) 5662 v.AuxInt = 1 5663 return true 5664 } 5665 // match: (SGTUconst [c] (SRLVconst _ [d])) 5666 // cond: 0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c) 5667 // result: (MOVVconst [1]) 5668 for { 5669 c := v.AuxInt 5670 v_0 := v.Args[0] 5671 if v_0.Op != OpMIPS64SRLVconst { 5672 
break 5673 } 5674 d := v_0.AuxInt 5675 if !(0 < d && d <= 63 && 1<<uint64(64-d) <= uint64(c)) { 5676 break 5677 } 5678 v.reset(OpMIPS64MOVVconst) 5679 v.AuxInt = 1 5680 return true 5681 } 5682 return false 5683 } 5684 func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool { 5685 // match: (SGTconst [c] (MOVVconst [d])) 5686 // cond: int64(c)>int64(d) 5687 // result: (MOVVconst [1]) 5688 for { 5689 c := v.AuxInt 5690 v_0 := v.Args[0] 5691 if v_0.Op != OpMIPS64MOVVconst { 5692 break 5693 } 5694 d := v_0.AuxInt 5695 if !(int64(c) > int64(d)) { 5696 break 5697 } 5698 v.reset(OpMIPS64MOVVconst) 5699 v.AuxInt = 1 5700 return true 5701 } 5702 // match: (SGTconst [c] (MOVVconst [d])) 5703 // cond: int64(c)<=int64(d) 5704 // result: (MOVVconst [0]) 5705 for { 5706 c := v.AuxInt 5707 v_0 := v.Args[0] 5708 if v_0.Op != OpMIPS64MOVVconst { 5709 break 5710 } 5711 d := v_0.AuxInt 5712 if !(int64(c) <= int64(d)) { 5713 break 5714 } 5715 v.reset(OpMIPS64MOVVconst) 5716 v.AuxInt = 0 5717 return true 5718 } 5719 // match: (SGTconst [c] (MOVBreg _)) 5720 // cond: 0x7f < int64(c) 5721 // result: (MOVVconst [1]) 5722 for { 5723 c := v.AuxInt 5724 v_0 := v.Args[0] 5725 if v_0.Op != OpMIPS64MOVBreg { 5726 break 5727 } 5728 if !(0x7f < int64(c)) { 5729 break 5730 } 5731 v.reset(OpMIPS64MOVVconst) 5732 v.AuxInt = 1 5733 return true 5734 } 5735 // match: (SGTconst [c] (MOVBreg _)) 5736 // cond: int64(c) <= -0x80 5737 // result: (MOVVconst [0]) 5738 for { 5739 c := v.AuxInt 5740 v_0 := v.Args[0] 5741 if v_0.Op != OpMIPS64MOVBreg { 5742 break 5743 } 5744 if !(int64(c) <= -0x80) { 5745 break 5746 } 5747 v.reset(OpMIPS64MOVVconst) 5748 v.AuxInt = 0 5749 return true 5750 } 5751 // match: (SGTconst [c] (MOVBUreg _)) 5752 // cond: 0xff < int64(c) 5753 // result: (MOVVconst [1]) 5754 for { 5755 c := v.AuxInt 5756 v_0 := v.Args[0] 5757 if v_0.Op != OpMIPS64MOVBUreg { 5758 break 5759 } 5760 if !(0xff < int64(c)) { 5761 break 5762 } 5763 v.reset(OpMIPS64MOVVconst) 5764 v.AuxInt = 1 5765 return true 
5766 } 5767 // match: (SGTconst [c] (MOVBUreg _)) 5768 // cond: int64(c) < 0 5769 // result: (MOVVconst [0]) 5770 for { 5771 c := v.AuxInt 5772 v_0 := v.Args[0] 5773 if v_0.Op != OpMIPS64MOVBUreg { 5774 break 5775 } 5776 if !(int64(c) < 0) { 5777 break 5778 } 5779 v.reset(OpMIPS64MOVVconst) 5780 v.AuxInt = 0 5781 return true 5782 } 5783 // match: (SGTconst [c] (MOVHreg _)) 5784 // cond: 0x7fff < int64(c) 5785 // result: (MOVVconst [1]) 5786 for { 5787 c := v.AuxInt 5788 v_0 := v.Args[0] 5789 if v_0.Op != OpMIPS64MOVHreg { 5790 break 5791 } 5792 if !(0x7fff < int64(c)) { 5793 break 5794 } 5795 v.reset(OpMIPS64MOVVconst) 5796 v.AuxInt = 1 5797 return true 5798 } 5799 // match: (SGTconst [c] (MOVHreg _)) 5800 // cond: int64(c) <= -0x8000 5801 // result: (MOVVconst [0]) 5802 for { 5803 c := v.AuxInt 5804 v_0 := v.Args[0] 5805 if v_0.Op != OpMIPS64MOVHreg { 5806 break 5807 } 5808 if !(int64(c) <= -0x8000) { 5809 break 5810 } 5811 v.reset(OpMIPS64MOVVconst) 5812 v.AuxInt = 0 5813 return true 5814 } 5815 // match: (SGTconst [c] (MOVHUreg _)) 5816 // cond: 0xffff < int64(c) 5817 // result: (MOVVconst [1]) 5818 for { 5819 c := v.AuxInt 5820 v_0 := v.Args[0] 5821 if v_0.Op != OpMIPS64MOVHUreg { 5822 break 5823 } 5824 if !(0xffff < int64(c)) { 5825 break 5826 } 5827 v.reset(OpMIPS64MOVVconst) 5828 v.AuxInt = 1 5829 return true 5830 } 5831 // match: (SGTconst [c] (MOVHUreg _)) 5832 // cond: int64(c) < 0 5833 // result: (MOVVconst [0]) 5834 for { 5835 c := v.AuxInt 5836 v_0 := v.Args[0] 5837 if v_0.Op != OpMIPS64MOVHUreg { 5838 break 5839 } 5840 if !(int64(c) < 0) { 5841 break 5842 } 5843 v.reset(OpMIPS64MOVVconst) 5844 v.AuxInt = 0 5845 return true 5846 } 5847 return false 5848 } 5849 func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool { 5850 // match: (SGTconst [c] (MOVWUreg _)) 5851 // cond: int64(c) < 0 5852 // result: (MOVVconst [0]) 5853 for { 5854 c := v.AuxInt 5855 v_0 := v.Args[0] 5856 if v_0.Op != OpMIPS64MOVWUreg { 5857 break 5858 } 5859 if !(int64(c) < 0) { 
5860 break 5861 } 5862 v.reset(OpMIPS64MOVVconst) 5863 v.AuxInt = 0 5864 return true 5865 } 5866 // match: (SGTconst [c] (ANDconst [m] _)) 5867 // cond: 0 <= m && m < c 5868 // result: (MOVVconst [1]) 5869 for { 5870 c := v.AuxInt 5871 v_0 := v.Args[0] 5872 if v_0.Op != OpMIPS64ANDconst { 5873 break 5874 } 5875 m := v_0.AuxInt 5876 if !(0 <= m && m < c) { 5877 break 5878 } 5879 v.reset(OpMIPS64MOVVconst) 5880 v.AuxInt = 1 5881 return true 5882 } 5883 // match: (SGTconst [c] (SRLVconst _ [d])) 5884 // cond: 0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c 5885 // result: (MOVVconst [1]) 5886 for { 5887 c := v.AuxInt 5888 v_0 := v.Args[0] 5889 if v_0.Op != OpMIPS64SRLVconst { 5890 break 5891 } 5892 d := v_0.AuxInt 5893 if !(0 <= c && 0 < d && d <= 63 && 1<<uint64(64-d) <= c) { 5894 break 5895 } 5896 v.reset(OpMIPS64MOVVconst) 5897 v.AuxInt = 1 5898 return true 5899 } 5900 return false 5901 } 5902 func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool { 5903 // match: (SLLV _ (MOVVconst [c])) 5904 // cond: uint64(c)>=64 5905 // result: (MOVVconst [0]) 5906 for { 5907 _ = v.Args[1] 5908 v_1 := v.Args[1] 5909 if v_1.Op != OpMIPS64MOVVconst { 5910 break 5911 } 5912 c := v_1.AuxInt 5913 if !(uint64(c) >= 64) { 5914 break 5915 } 5916 v.reset(OpMIPS64MOVVconst) 5917 v.AuxInt = 0 5918 return true 5919 } 5920 // match: (SLLV x (MOVVconst [c])) 5921 // cond: 5922 // result: (SLLVconst x [c]) 5923 for { 5924 _ = v.Args[1] 5925 x := v.Args[0] 5926 v_1 := v.Args[1] 5927 if v_1.Op != OpMIPS64MOVVconst { 5928 break 5929 } 5930 c := v_1.AuxInt 5931 v.reset(OpMIPS64SLLVconst) 5932 v.AuxInt = c 5933 v.AddArg(x) 5934 return true 5935 } 5936 return false 5937 } 5938 func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool { 5939 // match: (SLLVconst [c] (MOVVconst [d])) 5940 // cond: 5941 // result: (MOVVconst [int64(d)<<uint64(c)]) 5942 for { 5943 c := v.AuxInt 5944 v_0 := v.Args[0] 5945 if v_0.Op != OpMIPS64MOVVconst { 5946 break 5947 } 5948 d := v_0.AuxInt 5949 
v.reset(OpMIPS64MOVVconst) 5950 v.AuxInt = int64(d) << uint64(c) 5951 return true 5952 } 5953 return false 5954 } 5955 func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool { 5956 // match: (SRAV x (MOVVconst [c])) 5957 // cond: uint64(c)>=64 5958 // result: (SRAVconst x [63]) 5959 for { 5960 _ = v.Args[1] 5961 x := v.Args[0] 5962 v_1 := v.Args[1] 5963 if v_1.Op != OpMIPS64MOVVconst { 5964 break 5965 } 5966 c := v_1.AuxInt 5967 if !(uint64(c) >= 64) { 5968 break 5969 } 5970 v.reset(OpMIPS64SRAVconst) 5971 v.AuxInt = 63 5972 v.AddArg(x) 5973 return true 5974 } 5975 // match: (SRAV x (MOVVconst [c])) 5976 // cond: 5977 // result: (SRAVconst x [c]) 5978 for { 5979 _ = v.Args[1] 5980 x := v.Args[0] 5981 v_1 := v.Args[1] 5982 if v_1.Op != OpMIPS64MOVVconst { 5983 break 5984 } 5985 c := v_1.AuxInt 5986 v.reset(OpMIPS64SRAVconst) 5987 v.AuxInt = c 5988 v.AddArg(x) 5989 return true 5990 } 5991 return false 5992 } 5993 func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool { 5994 // match: (SRAVconst [c] (MOVVconst [d])) 5995 // cond: 5996 // result: (MOVVconst [int64(d)>>uint64(c)]) 5997 for { 5998 c := v.AuxInt 5999 v_0 := v.Args[0] 6000 if v_0.Op != OpMIPS64MOVVconst { 6001 break 6002 } 6003 d := v_0.AuxInt 6004 v.reset(OpMIPS64MOVVconst) 6005 v.AuxInt = int64(d) >> uint64(c) 6006 return true 6007 } 6008 return false 6009 } 6010 func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool { 6011 // match: (SRLV _ (MOVVconst [c])) 6012 // cond: uint64(c)>=64 6013 // result: (MOVVconst [0]) 6014 for { 6015 _ = v.Args[1] 6016 v_1 := v.Args[1] 6017 if v_1.Op != OpMIPS64MOVVconst { 6018 break 6019 } 6020 c := v_1.AuxInt 6021 if !(uint64(c) >= 64) { 6022 break 6023 } 6024 v.reset(OpMIPS64MOVVconst) 6025 v.AuxInt = 0 6026 return true 6027 } 6028 // match: (SRLV x (MOVVconst [c])) 6029 // cond: 6030 // result: (SRLVconst x [c]) 6031 for { 6032 _ = v.Args[1] 6033 x := v.Args[0] 6034 v_1 := v.Args[1] 6035 if v_1.Op != OpMIPS64MOVVconst { 6036 break 6037 } 6038 c := v_1.AuxInt 6039 
v.reset(OpMIPS64SRLVconst) 6040 v.AuxInt = c 6041 v.AddArg(x) 6042 return true 6043 } 6044 return false 6045 } 6046 func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool { 6047 // match: (SRLVconst [c] (MOVVconst [d])) 6048 // cond: 6049 // result: (MOVVconst [int64(uint64(d)>>uint64(c))]) 6050 for { 6051 c := v.AuxInt 6052 v_0 := v.Args[0] 6053 if v_0.Op != OpMIPS64MOVVconst { 6054 break 6055 } 6056 d := v_0.AuxInt 6057 v.reset(OpMIPS64MOVVconst) 6058 v.AuxInt = int64(uint64(d) >> uint64(c)) 6059 return true 6060 } 6061 return false 6062 } 6063 func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool { 6064 // match: (SUBV x (MOVVconst [c])) 6065 // cond: is32Bit(c) 6066 // result: (SUBVconst [c] x) 6067 for { 6068 _ = v.Args[1] 6069 x := v.Args[0] 6070 v_1 := v.Args[1] 6071 if v_1.Op != OpMIPS64MOVVconst { 6072 break 6073 } 6074 c := v_1.AuxInt 6075 if !(is32Bit(c)) { 6076 break 6077 } 6078 v.reset(OpMIPS64SUBVconst) 6079 v.AuxInt = c 6080 v.AddArg(x) 6081 return true 6082 } 6083 // match: (SUBV x x) 6084 // cond: 6085 // result: (MOVVconst [0]) 6086 for { 6087 _ = v.Args[1] 6088 x := v.Args[0] 6089 if x != v.Args[1] { 6090 break 6091 } 6092 v.reset(OpMIPS64MOVVconst) 6093 v.AuxInt = 0 6094 return true 6095 } 6096 // match: (SUBV (MOVVconst [0]) x) 6097 // cond: 6098 // result: (NEGV x) 6099 for { 6100 _ = v.Args[1] 6101 v_0 := v.Args[0] 6102 if v_0.Op != OpMIPS64MOVVconst { 6103 break 6104 } 6105 if v_0.AuxInt != 0 { 6106 break 6107 } 6108 x := v.Args[1] 6109 v.reset(OpMIPS64NEGV) 6110 v.AddArg(x) 6111 return true 6112 } 6113 return false 6114 } 6115 func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool { 6116 // match: (SUBVconst [0] x) 6117 // cond: 6118 // result: x 6119 for { 6120 if v.AuxInt != 0 { 6121 break 6122 } 6123 x := v.Args[0] 6124 v.reset(OpCopy) 6125 v.Type = x.Type 6126 v.AddArg(x) 6127 return true 6128 } 6129 // match: (SUBVconst [c] (MOVVconst [d])) 6130 // cond: 6131 // result: (MOVVconst [d-c]) 6132 for { 6133 c := v.AuxInt 6134 v_0 
:= v.Args[0] 6135 if v_0.Op != OpMIPS64MOVVconst { 6136 break 6137 } 6138 d := v_0.AuxInt 6139 v.reset(OpMIPS64MOVVconst) 6140 v.AuxInt = d - c 6141 return true 6142 } 6143 // match: (SUBVconst [c] (SUBVconst [d] x)) 6144 // cond: is32Bit(-c-d) 6145 // result: (ADDVconst [-c-d] x) 6146 for { 6147 c := v.AuxInt 6148 v_0 := v.Args[0] 6149 if v_0.Op != OpMIPS64SUBVconst { 6150 break 6151 } 6152 d := v_0.AuxInt 6153 x := v_0.Args[0] 6154 if !(is32Bit(-c - d)) { 6155 break 6156 } 6157 v.reset(OpMIPS64ADDVconst) 6158 v.AuxInt = -c - d 6159 v.AddArg(x) 6160 return true 6161 } 6162 // match: (SUBVconst [c] (ADDVconst [d] x)) 6163 // cond: is32Bit(-c+d) 6164 // result: (ADDVconst [-c+d] x) 6165 for { 6166 c := v.AuxInt 6167 v_0 := v.Args[0] 6168 if v_0.Op != OpMIPS64ADDVconst { 6169 break 6170 } 6171 d := v_0.AuxInt 6172 x := v_0.Args[0] 6173 if !(is32Bit(-c + d)) { 6174 break 6175 } 6176 v.reset(OpMIPS64ADDVconst) 6177 v.AuxInt = -c + d 6178 v.AddArg(x) 6179 return true 6180 } 6181 return false 6182 } 6183 func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool { 6184 // match: (XOR x (MOVVconst [c])) 6185 // cond: is32Bit(c) 6186 // result: (XORconst [c] x) 6187 for { 6188 _ = v.Args[1] 6189 x := v.Args[0] 6190 v_1 := v.Args[1] 6191 if v_1.Op != OpMIPS64MOVVconst { 6192 break 6193 } 6194 c := v_1.AuxInt 6195 if !(is32Bit(c)) { 6196 break 6197 } 6198 v.reset(OpMIPS64XORconst) 6199 v.AuxInt = c 6200 v.AddArg(x) 6201 return true 6202 } 6203 // match: (XOR (MOVVconst [c]) x) 6204 // cond: is32Bit(c) 6205 // result: (XORconst [c] x) 6206 for { 6207 _ = v.Args[1] 6208 v_0 := v.Args[0] 6209 if v_0.Op != OpMIPS64MOVVconst { 6210 break 6211 } 6212 c := v_0.AuxInt 6213 x := v.Args[1] 6214 if !(is32Bit(c)) { 6215 break 6216 } 6217 v.reset(OpMIPS64XORconst) 6218 v.AuxInt = c 6219 v.AddArg(x) 6220 return true 6221 } 6222 // match: (XOR x x) 6223 // cond: 6224 // result: (MOVVconst [0]) 6225 for { 6226 _ = v.Args[1] 6227 x := v.Args[0] 6228 if x != v.Args[1] { 6229 break 6230 } 6231 
v.reset(OpMIPS64MOVVconst) 6232 v.AuxInt = 0 6233 return true 6234 } 6235 return false 6236 } 6237 func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool { 6238 // match: (XORconst [0] x) 6239 // cond: 6240 // result: x 6241 for { 6242 if v.AuxInt != 0 { 6243 break 6244 } 6245 x := v.Args[0] 6246 v.reset(OpCopy) 6247 v.Type = x.Type 6248 v.AddArg(x) 6249 return true 6250 } 6251 // match: (XORconst [-1] x) 6252 // cond: 6253 // result: (NORconst [0] x) 6254 for { 6255 if v.AuxInt != -1 { 6256 break 6257 } 6258 x := v.Args[0] 6259 v.reset(OpMIPS64NORconst) 6260 v.AuxInt = 0 6261 v.AddArg(x) 6262 return true 6263 } 6264 // match: (XORconst [c] (MOVVconst [d])) 6265 // cond: 6266 // result: (MOVVconst [c^d]) 6267 for { 6268 c := v.AuxInt 6269 v_0 := v.Args[0] 6270 if v_0.Op != OpMIPS64MOVVconst { 6271 break 6272 } 6273 d := v_0.AuxInt 6274 v.reset(OpMIPS64MOVVconst) 6275 v.AuxInt = c ^ d 6276 return true 6277 } 6278 // match: (XORconst [c] (XORconst [d] x)) 6279 // cond: is32Bit(c^d) 6280 // result: (XORconst [c^d] x) 6281 for { 6282 c := v.AuxInt 6283 v_0 := v.Args[0] 6284 if v_0.Op != OpMIPS64XORconst { 6285 break 6286 } 6287 d := v_0.AuxInt 6288 x := v_0.Args[0] 6289 if !(is32Bit(c ^ d)) { 6290 break 6291 } 6292 v.reset(OpMIPS64XORconst) 6293 v.AuxInt = c ^ d 6294 v.AddArg(x) 6295 return true 6296 } 6297 return false 6298 } 6299 func rewriteValueMIPS64_OpMod16_0(v *Value) bool { 6300 b := v.Block 6301 _ = b 6302 typ := &b.Func.Config.Types 6303 _ = typ 6304 // match: (Mod16 x y) 6305 // cond: 6306 // result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 6307 for { 6308 _ = v.Args[1] 6309 x := v.Args[0] 6310 y := v.Args[1] 6311 v.reset(OpSelect0) 6312 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6313 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6314 v1.AddArg(x) 6315 v0.AddArg(v1) 6316 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6317 v2.AddArg(y) 6318 v0.AddArg(v2) 6319 v.AddArg(v0) 6320 return true 6321 } 
6322 } 6323 func rewriteValueMIPS64_OpMod16u_0(v *Value) bool { 6324 b := v.Block 6325 _ = b 6326 typ := &b.Func.Config.Types 6327 _ = typ 6328 // match: (Mod16u x y) 6329 // cond: 6330 // result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 6331 for { 6332 _ = v.Args[1] 6333 x := v.Args[0] 6334 y := v.Args[1] 6335 v.reset(OpSelect0) 6336 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6337 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6338 v1.AddArg(x) 6339 v0.AddArg(v1) 6340 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6341 v2.AddArg(y) 6342 v0.AddArg(v2) 6343 v.AddArg(v0) 6344 return true 6345 } 6346 } 6347 func rewriteValueMIPS64_OpMod32_0(v *Value) bool { 6348 b := v.Block 6349 _ = b 6350 typ := &b.Func.Config.Types 6351 _ = typ 6352 // match: (Mod32 x y) 6353 // cond: 6354 // result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 6355 for { 6356 _ = v.Args[1] 6357 x := v.Args[0] 6358 y := v.Args[1] 6359 v.reset(OpSelect0) 6360 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6361 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6362 v1.AddArg(x) 6363 v0.AddArg(v1) 6364 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6365 v2.AddArg(y) 6366 v0.AddArg(v2) 6367 v.AddArg(v0) 6368 return true 6369 } 6370 } 6371 func rewriteValueMIPS64_OpMod32u_0(v *Value) bool { 6372 b := v.Block 6373 _ = b 6374 typ := &b.Func.Config.Types 6375 _ = typ 6376 // match: (Mod32u x y) 6377 // cond: 6378 // result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 6379 for { 6380 _ = v.Args[1] 6381 x := v.Args[0] 6382 y := v.Args[1] 6383 v.reset(OpSelect0) 6384 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6385 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6386 v1.AddArg(x) 6387 v0.AddArg(v1) 6388 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6389 v2.AddArg(y) 6390 v0.AddArg(v2) 6391 v.AddArg(v0) 6392 return true 6393 } 6394 } 6395 
func rewriteValueMIPS64_OpMod64_0(v *Value) bool { 6396 b := v.Block 6397 _ = b 6398 typ := &b.Func.Config.Types 6399 _ = typ 6400 // match: (Mod64 x y) 6401 // cond: 6402 // result: (Select0 (DIVV x y)) 6403 for { 6404 _ = v.Args[1] 6405 x := v.Args[0] 6406 y := v.Args[1] 6407 v.reset(OpSelect0) 6408 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6409 v0.AddArg(x) 6410 v0.AddArg(y) 6411 v.AddArg(v0) 6412 return true 6413 } 6414 } 6415 func rewriteValueMIPS64_OpMod64u_0(v *Value) bool { 6416 b := v.Block 6417 _ = b 6418 typ := &b.Func.Config.Types 6419 _ = typ 6420 // match: (Mod64u x y) 6421 // cond: 6422 // result: (Select0 (DIVVU x y)) 6423 for { 6424 _ = v.Args[1] 6425 x := v.Args[0] 6426 y := v.Args[1] 6427 v.reset(OpSelect0) 6428 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6429 v0.AddArg(x) 6430 v0.AddArg(y) 6431 v.AddArg(v0) 6432 return true 6433 } 6434 } 6435 func rewriteValueMIPS64_OpMod8_0(v *Value) bool { 6436 b := v.Block 6437 _ = b 6438 typ := &b.Func.Config.Types 6439 _ = typ 6440 // match: (Mod8 x y) 6441 // cond: 6442 // result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 6443 for { 6444 _ = v.Args[1] 6445 x := v.Args[0] 6446 y := v.Args[1] 6447 v.reset(OpSelect0) 6448 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6449 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6450 v1.AddArg(x) 6451 v0.AddArg(v1) 6452 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6453 v2.AddArg(y) 6454 v0.AddArg(v2) 6455 v.AddArg(v0) 6456 return true 6457 } 6458 } 6459 func rewriteValueMIPS64_OpMod8u_0(v *Value) bool { 6460 b := v.Block 6461 _ = b 6462 typ := &b.Func.Config.Types 6463 _ = typ 6464 // match: (Mod8u x y) 6465 // cond: 6466 // result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 6467 for { 6468 _ = v.Args[1] 6469 x := v.Args[0] 6470 y := v.Args[1] 6471 v.reset(OpSelect0) 6472 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, 
typ.UInt64)) 6473 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6474 v1.AddArg(x) 6475 v0.AddArg(v1) 6476 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6477 v2.AddArg(y) 6478 v0.AddArg(v2) 6479 v.AddArg(v0) 6480 return true 6481 } 6482 } 6483 func rewriteValueMIPS64_OpMove_0(v *Value) bool { 6484 b := v.Block 6485 _ = b 6486 typ := &b.Func.Config.Types 6487 _ = typ 6488 // match: (Move [0] _ _ mem) 6489 // cond: 6490 // result: mem 6491 for { 6492 if v.AuxInt != 0 { 6493 break 6494 } 6495 _ = v.Args[2] 6496 mem := v.Args[2] 6497 v.reset(OpCopy) 6498 v.Type = mem.Type 6499 v.AddArg(mem) 6500 return true 6501 } 6502 // match: (Move [1] dst src mem) 6503 // cond: 6504 // result: (MOVBstore dst (MOVBload src mem) mem) 6505 for { 6506 if v.AuxInt != 1 { 6507 break 6508 } 6509 _ = v.Args[2] 6510 dst := v.Args[0] 6511 src := v.Args[1] 6512 mem := v.Args[2] 6513 v.reset(OpMIPS64MOVBstore) 6514 v.AddArg(dst) 6515 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6516 v0.AddArg(src) 6517 v0.AddArg(mem) 6518 v.AddArg(v0) 6519 v.AddArg(mem) 6520 return true 6521 } 6522 // match: (Move [2] {t} dst src mem) 6523 // cond: t.(*types.Type).Alignment()%2 == 0 6524 // result: (MOVHstore dst (MOVHload src mem) mem) 6525 for { 6526 if v.AuxInt != 2 { 6527 break 6528 } 6529 t := v.Aux 6530 _ = v.Args[2] 6531 dst := v.Args[0] 6532 src := v.Args[1] 6533 mem := v.Args[2] 6534 if !(t.(*types.Type).Alignment()%2 == 0) { 6535 break 6536 } 6537 v.reset(OpMIPS64MOVHstore) 6538 v.AddArg(dst) 6539 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6540 v0.AddArg(src) 6541 v0.AddArg(mem) 6542 v.AddArg(v0) 6543 v.AddArg(mem) 6544 return true 6545 } 6546 // match: (Move [2] dst src mem) 6547 // cond: 6548 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)) 6549 for { 6550 if v.AuxInt != 2 { 6551 break 6552 } 6553 _ = v.Args[2] 6554 dst := v.Args[0] 6555 src := v.Args[1] 6556 mem := v.Args[2] 6557 v.reset(OpMIPS64MOVBstore) 6558 v.AuxInt = 
1 6559 v.AddArg(dst) 6560 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6561 v0.AuxInt = 1 6562 v0.AddArg(src) 6563 v0.AddArg(mem) 6564 v.AddArg(v0) 6565 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6566 v1.AddArg(dst) 6567 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6568 v2.AddArg(src) 6569 v2.AddArg(mem) 6570 v1.AddArg(v2) 6571 v1.AddArg(mem) 6572 v.AddArg(v1) 6573 return true 6574 } 6575 // match: (Move [4] {t} dst src mem) 6576 // cond: t.(*types.Type).Alignment()%4 == 0 6577 // result: (MOVWstore dst (MOVWload src mem) mem) 6578 for { 6579 if v.AuxInt != 4 { 6580 break 6581 } 6582 t := v.Aux 6583 _ = v.Args[2] 6584 dst := v.Args[0] 6585 src := v.Args[1] 6586 mem := v.Args[2] 6587 if !(t.(*types.Type).Alignment()%4 == 0) { 6588 break 6589 } 6590 v.reset(OpMIPS64MOVWstore) 6591 v.AddArg(dst) 6592 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6593 v0.AddArg(src) 6594 v0.AddArg(mem) 6595 v.AddArg(v0) 6596 v.AddArg(mem) 6597 return true 6598 } 6599 // match: (Move [4] {t} dst src mem) 6600 // cond: t.(*types.Type).Alignment()%2 == 0 6601 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)) 6602 for { 6603 if v.AuxInt != 4 { 6604 break 6605 } 6606 t := v.Aux 6607 _ = v.Args[2] 6608 dst := v.Args[0] 6609 src := v.Args[1] 6610 mem := v.Args[2] 6611 if !(t.(*types.Type).Alignment()%2 == 0) { 6612 break 6613 } 6614 v.reset(OpMIPS64MOVHstore) 6615 v.AuxInt = 2 6616 v.AddArg(dst) 6617 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6618 v0.AuxInt = 2 6619 v0.AddArg(src) 6620 v0.AddArg(mem) 6621 v.AddArg(v0) 6622 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6623 v1.AddArg(dst) 6624 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6625 v2.AddArg(src) 6626 v2.AddArg(mem) 6627 v1.AddArg(v2) 6628 v1.AddArg(mem) 6629 v.AddArg(v1) 6630 return true 6631 } 6632 // match: (Move [4] dst src mem) 6633 // cond: 6634 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] 
dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))) 6635 for { 6636 if v.AuxInt != 4 { 6637 break 6638 } 6639 _ = v.Args[2] 6640 dst := v.Args[0] 6641 src := v.Args[1] 6642 mem := v.Args[2] 6643 v.reset(OpMIPS64MOVBstore) 6644 v.AuxInt = 3 6645 v.AddArg(dst) 6646 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6647 v0.AuxInt = 3 6648 v0.AddArg(src) 6649 v0.AddArg(mem) 6650 v.AddArg(v0) 6651 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6652 v1.AuxInt = 2 6653 v1.AddArg(dst) 6654 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6655 v2.AuxInt = 2 6656 v2.AddArg(src) 6657 v2.AddArg(mem) 6658 v1.AddArg(v2) 6659 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6660 v3.AuxInt = 1 6661 v3.AddArg(dst) 6662 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6663 v4.AuxInt = 1 6664 v4.AddArg(src) 6665 v4.AddArg(mem) 6666 v3.AddArg(v4) 6667 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6668 v5.AddArg(dst) 6669 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6670 v6.AddArg(src) 6671 v6.AddArg(mem) 6672 v5.AddArg(v6) 6673 v5.AddArg(mem) 6674 v3.AddArg(v5) 6675 v1.AddArg(v3) 6676 v.AddArg(v1) 6677 return true 6678 } 6679 // match: (Move [8] {t} dst src mem) 6680 // cond: t.(*types.Type).Alignment()%8 == 0 6681 // result: (MOVVstore dst (MOVVload src mem) mem) 6682 for { 6683 if v.AuxInt != 8 { 6684 break 6685 } 6686 t := v.Aux 6687 _ = v.Args[2] 6688 dst := v.Args[0] 6689 src := v.Args[1] 6690 mem := v.Args[2] 6691 if !(t.(*types.Type).Alignment()%8 == 0) { 6692 break 6693 } 6694 v.reset(OpMIPS64MOVVstore) 6695 v.AddArg(dst) 6696 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6697 v0.AddArg(src) 6698 v0.AddArg(mem) 6699 v.AddArg(v0) 6700 v.AddArg(mem) 6701 return true 6702 } 6703 // match: (Move [8] {t} dst src mem) 6704 // cond: t.(*types.Type).Alignment()%4 == 0 6705 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 
6706 for { 6707 if v.AuxInt != 8 { 6708 break 6709 } 6710 t := v.Aux 6711 _ = v.Args[2] 6712 dst := v.Args[0] 6713 src := v.Args[1] 6714 mem := v.Args[2] 6715 if !(t.(*types.Type).Alignment()%4 == 0) { 6716 break 6717 } 6718 v.reset(OpMIPS64MOVWstore) 6719 v.AuxInt = 4 6720 v.AddArg(dst) 6721 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6722 v0.AuxInt = 4 6723 v0.AddArg(src) 6724 v0.AddArg(mem) 6725 v.AddArg(v0) 6726 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 6727 v1.AddArg(dst) 6728 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6729 v2.AddArg(src) 6730 v2.AddArg(mem) 6731 v1.AddArg(v2) 6732 v1.AddArg(mem) 6733 v.AddArg(v1) 6734 return true 6735 } 6736 // match: (Move [8] {t} dst src mem) 6737 // cond: t.(*types.Type).Alignment()%2 == 0 6738 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 6739 for { 6740 if v.AuxInt != 8 { 6741 break 6742 } 6743 t := v.Aux 6744 _ = v.Args[2] 6745 dst := v.Args[0] 6746 src := v.Args[1] 6747 mem := v.Args[2] 6748 if !(t.(*types.Type).Alignment()%2 == 0) { 6749 break 6750 } 6751 v.reset(OpMIPS64MOVHstore) 6752 v.AuxInt = 6 6753 v.AddArg(dst) 6754 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6755 v0.AuxInt = 6 6756 v0.AddArg(src) 6757 v0.AddArg(mem) 6758 v.AddArg(v0) 6759 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6760 v1.AuxInt = 4 6761 v1.AddArg(dst) 6762 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6763 v2.AuxInt = 4 6764 v2.AddArg(src) 6765 v2.AddArg(mem) 6766 v1.AddArg(v2) 6767 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6768 v3.AuxInt = 2 6769 v3.AddArg(dst) 6770 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6771 v4.AuxInt = 2 6772 v4.AddArg(src) 6773 v4.AddArg(mem) 6774 v3.AddArg(v4) 6775 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6776 v5.AddArg(dst) 6777 v6 := b.NewValue0(v.Pos, 
OpMIPS64MOVHload, typ.Int16) 6778 v6.AddArg(src) 6779 v6.AddArg(mem) 6780 v5.AddArg(v6) 6781 v5.AddArg(mem) 6782 v3.AddArg(v5) 6783 v1.AddArg(v3) 6784 v.AddArg(v1) 6785 return true 6786 } 6787 return false 6788 } 6789 func rewriteValueMIPS64_OpMove_10(v *Value) bool { 6790 b := v.Block 6791 _ = b 6792 config := b.Func.Config 6793 _ = config 6794 typ := &b.Func.Config.Types 6795 _ = typ 6796 // match: (Move [3] dst src mem) 6797 // cond: 6798 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))) 6799 for { 6800 if v.AuxInt != 3 { 6801 break 6802 } 6803 _ = v.Args[2] 6804 dst := v.Args[0] 6805 src := v.Args[1] 6806 mem := v.Args[2] 6807 v.reset(OpMIPS64MOVBstore) 6808 v.AuxInt = 2 6809 v.AddArg(dst) 6810 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6811 v0.AuxInt = 2 6812 v0.AddArg(src) 6813 v0.AddArg(mem) 6814 v.AddArg(v0) 6815 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6816 v1.AuxInt = 1 6817 v1.AddArg(dst) 6818 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6819 v2.AuxInt = 1 6820 v2.AddArg(src) 6821 v2.AddArg(mem) 6822 v1.AddArg(v2) 6823 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6824 v3.AddArg(dst) 6825 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6826 v4.AddArg(src) 6827 v4.AddArg(mem) 6828 v3.AddArg(v4) 6829 v3.AddArg(mem) 6830 v1.AddArg(v3) 6831 v.AddArg(v1) 6832 return true 6833 } 6834 // match: (Move [6] {t} dst src mem) 6835 // cond: t.(*types.Type).Alignment()%2 == 0 6836 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 6837 for { 6838 if v.AuxInt != 6 { 6839 break 6840 } 6841 t := v.Aux 6842 _ = v.Args[2] 6843 dst := v.Args[0] 6844 src := v.Args[1] 6845 mem := v.Args[2] 6846 if !(t.(*types.Type).Alignment()%2 == 0) { 6847 break 6848 } 6849 v.reset(OpMIPS64MOVHstore) 6850 v.AuxInt = 4 6851 v.AddArg(dst) 6852 v0 := b.NewValue0(v.Pos, 
OpMIPS64MOVHload, typ.Int16) 6853 v0.AuxInt = 4 6854 v0.AddArg(src) 6855 v0.AddArg(mem) 6856 v.AddArg(v0) 6857 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6858 v1.AuxInt = 2 6859 v1.AddArg(dst) 6860 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6861 v2.AuxInt = 2 6862 v2.AddArg(src) 6863 v2.AddArg(mem) 6864 v1.AddArg(v2) 6865 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6866 v3.AddArg(dst) 6867 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6868 v4.AddArg(src) 6869 v4.AddArg(mem) 6870 v3.AddArg(v4) 6871 v3.AddArg(mem) 6872 v1.AddArg(v3) 6873 v.AddArg(v1) 6874 return true 6875 } 6876 // match: (Move [12] {t} dst src mem) 6877 // cond: t.(*types.Type).Alignment()%4 == 0 6878 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) 6879 for { 6880 if v.AuxInt != 12 { 6881 break 6882 } 6883 t := v.Aux 6884 _ = v.Args[2] 6885 dst := v.Args[0] 6886 src := v.Args[1] 6887 mem := v.Args[2] 6888 if !(t.(*types.Type).Alignment()%4 == 0) { 6889 break 6890 } 6891 v.reset(OpMIPS64MOVWstore) 6892 v.AuxInt = 8 6893 v.AddArg(dst) 6894 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6895 v0.AuxInt = 8 6896 v0.AddArg(src) 6897 v0.AddArg(mem) 6898 v.AddArg(v0) 6899 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 6900 v1.AuxInt = 4 6901 v1.AddArg(dst) 6902 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6903 v2.AuxInt = 4 6904 v2.AddArg(src) 6905 v2.AddArg(mem) 6906 v1.AddArg(v2) 6907 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 6908 v3.AddArg(dst) 6909 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6910 v4.AddArg(src) 6911 v4.AddArg(mem) 6912 v3.AddArg(v4) 6913 v3.AddArg(mem) 6914 v1.AddArg(v3) 6915 v.AddArg(v1) 6916 return true 6917 } 6918 // match: (Move [16] {t} dst src mem) 6919 // cond: t.(*types.Type).Alignment()%8 == 0 6920 // result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) 
mem)) 6921 for { 6922 if v.AuxInt != 16 { 6923 break 6924 } 6925 t := v.Aux 6926 _ = v.Args[2] 6927 dst := v.Args[0] 6928 src := v.Args[1] 6929 mem := v.Args[2] 6930 if !(t.(*types.Type).Alignment()%8 == 0) { 6931 break 6932 } 6933 v.reset(OpMIPS64MOVVstore) 6934 v.AuxInt = 8 6935 v.AddArg(dst) 6936 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6937 v0.AuxInt = 8 6938 v0.AddArg(src) 6939 v0.AddArg(mem) 6940 v.AddArg(v0) 6941 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 6942 v1.AddArg(dst) 6943 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6944 v2.AddArg(src) 6945 v2.AddArg(mem) 6946 v1.AddArg(v2) 6947 v1.AddArg(mem) 6948 v.AddArg(v1) 6949 return true 6950 } 6951 // match: (Move [24] {t} dst src mem) 6952 // cond: t.(*types.Type).Alignment()%8 == 0 6953 // result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))) 6954 for { 6955 if v.AuxInt != 24 { 6956 break 6957 } 6958 t := v.Aux 6959 _ = v.Args[2] 6960 dst := v.Args[0] 6961 src := v.Args[1] 6962 mem := v.Args[2] 6963 if !(t.(*types.Type).Alignment()%8 == 0) { 6964 break 6965 } 6966 v.reset(OpMIPS64MOVVstore) 6967 v.AuxInt = 16 6968 v.AddArg(dst) 6969 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6970 v0.AuxInt = 16 6971 v0.AddArg(src) 6972 v0.AddArg(mem) 6973 v.AddArg(v0) 6974 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 6975 v1.AuxInt = 8 6976 v1.AddArg(dst) 6977 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6978 v2.AuxInt = 8 6979 v2.AddArg(src) 6980 v2.AddArg(mem) 6981 v1.AddArg(v2) 6982 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 6983 v3.AddArg(dst) 6984 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 6985 v4.AddArg(src) 6986 v4.AddArg(mem) 6987 v3.AddArg(v4) 6988 v3.AddArg(mem) 6989 v1.AddArg(v3) 6990 v.AddArg(v1) 6991 return true 6992 } 6993 // match: (Move [s] {t} dst src mem) 6994 // cond: s > 24 || t.(*types.Type).Alignment()%8 != 0 6995 // 
result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 6996 for { 6997 s := v.AuxInt 6998 t := v.Aux 6999 _ = v.Args[2] 7000 dst := v.Args[0] 7001 src := v.Args[1] 7002 mem := v.Args[2] 7003 if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) { 7004 break 7005 } 7006 v.reset(OpMIPS64LoweredMove) 7007 v.AuxInt = t.(*types.Type).Alignment() 7008 v.AddArg(dst) 7009 v.AddArg(src) 7010 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type) 7011 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 7012 v0.AddArg(src) 7013 v.AddArg(v0) 7014 v.AddArg(mem) 7015 return true 7016 } 7017 return false 7018 } 7019 func rewriteValueMIPS64_OpMul16_0(v *Value) bool { 7020 b := v.Block 7021 _ = b 7022 typ := &b.Func.Config.Types 7023 _ = typ 7024 // match: (Mul16 x y) 7025 // cond: 7026 // result: (Select1 (MULVU x y)) 7027 for { 7028 _ = v.Args[1] 7029 x := v.Args[0] 7030 y := v.Args[1] 7031 v.reset(OpSelect1) 7032 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7033 v0.AddArg(x) 7034 v0.AddArg(y) 7035 v.AddArg(v0) 7036 return true 7037 } 7038 } 7039 func rewriteValueMIPS64_OpMul32_0(v *Value) bool { 7040 b := v.Block 7041 _ = b 7042 typ := &b.Func.Config.Types 7043 _ = typ 7044 // match: (Mul32 x y) 7045 // cond: 7046 // result: (Select1 (MULVU x y)) 7047 for { 7048 _ = v.Args[1] 7049 x := v.Args[0] 7050 y := v.Args[1] 7051 v.reset(OpSelect1) 7052 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7053 v0.AddArg(x) 7054 v0.AddArg(y) 7055 v.AddArg(v0) 7056 return true 7057 } 7058 } 7059 func rewriteValueMIPS64_OpMul32F_0(v *Value) bool { 7060 // match: (Mul32F x y) 7061 // cond: 7062 // result: (MULF x y) 7063 for { 7064 _ = v.Args[1] 7065 x := v.Args[0] 7066 y := v.Args[1] 7067 v.reset(OpMIPS64MULF) 7068 v.AddArg(x) 7069 v.AddArg(y) 7070 return true 7071 } 7072 } 7073 func rewriteValueMIPS64_OpMul64_0(v *Value) bool { 7074 b := 
v.Block 7075 _ = b 7076 typ := &b.Func.Config.Types 7077 _ = typ 7078 // match: (Mul64 x y) 7079 // cond: 7080 // result: (Select1 (MULVU x y)) 7081 for { 7082 _ = v.Args[1] 7083 x := v.Args[0] 7084 y := v.Args[1] 7085 v.reset(OpSelect1) 7086 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7087 v0.AddArg(x) 7088 v0.AddArg(y) 7089 v.AddArg(v0) 7090 return true 7091 } 7092 } 7093 func rewriteValueMIPS64_OpMul64F_0(v *Value) bool { 7094 // match: (Mul64F x y) 7095 // cond: 7096 // result: (MULD x y) 7097 for { 7098 _ = v.Args[1] 7099 x := v.Args[0] 7100 y := v.Args[1] 7101 v.reset(OpMIPS64MULD) 7102 v.AddArg(x) 7103 v.AddArg(y) 7104 return true 7105 } 7106 } 7107 func rewriteValueMIPS64_OpMul8_0(v *Value) bool { 7108 b := v.Block 7109 _ = b 7110 typ := &b.Func.Config.Types 7111 _ = typ 7112 // match: (Mul8 x y) 7113 // cond: 7114 // result: (Select1 (MULVU x y)) 7115 for { 7116 _ = v.Args[1] 7117 x := v.Args[0] 7118 y := v.Args[1] 7119 v.reset(OpSelect1) 7120 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7121 v0.AddArg(x) 7122 v0.AddArg(y) 7123 v.AddArg(v0) 7124 return true 7125 } 7126 } 7127 func rewriteValueMIPS64_OpNeg16_0(v *Value) bool { 7128 // match: (Neg16 x) 7129 // cond: 7130 // result: (NEGV x) 7131 for { 7132 x := v.Args[0] 7133 v.reset(OpMIPS64NEGV) 7134 v.AddArg(x) 7135 return true 7136 } 7137 } 7138 func rewriteValueMIPS64_OpNeg32_0(v *Value) bool { 7139 // match: (Neg32 x) 7140 // cond: 7141 // result: (NEGV x) 7142 for { 7143 x := v.Args[0] 7144 v.reset(OpMIPS64NEGV) 7145 v.AddArg(x) 7146 return true 7147 } 7148 } 7149 func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool { 7150 // match: (Neg32F x) 7151 // cond: 7152 // result: (NEGF x) 7153 for { 7154 x := v.Args[0] 7155 v.reset(OpMIPS64NEGF) 7156 v.AddArg(x) 7157 return true 7158 } 7159 } 7160 func rewriteValueMIPS64_OpNeg64_0(v *Value) bool { 7161 // match: (Neg64 x) 7162 // cond: 7163 // result: (NEGV x) 7164 for { 7165 x := v.Args[0] 
7166 v.reset(OpMIPS64NEGV) 7167 v.AddArg(x) 7168 return true 7169 } 7170 } 7171 func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool { 7172 // match: (Neg64F x) 7173 // cond: 7174 // result: (NEGD x) 7175 for { 7176 x := v.Args[0] 7177 v.reset(OpMIPS64NEGD) 7178 v.AddArg(x) 7179 return true 7180 } 7181 } 7182 func rewriteValueMIPS64_OpNeg8_0(v *Value) bool { 7183 // match: (Neg8 x) 7184 // cond: 7185 // result: (NEGV x) 7186 for { 7187 x := v.Args[0] 7188 v.reset(OpMIPS64NEGV) 7189 v.AddArg(x) 7190 return true 7191 } 7192 } 7193 func rewriteValueMIPS64_OpNeq16_0(v *Value) bool { 7194 b := v.Block 7195 _ = b 7196 typ := &b.Func.Config.Types 7197 _ = typ 7198 // match: (Neq16 x y) 7199 // cond: 7200 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0])) 7201 for { 7202 _ = v.Args[1] 7203 x := v.Args[0] 7204 y := v.Args[1] 7205 v.reset(OpMIPS64SGTU) 7206 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7207 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7208 v1.AddArg(x) 7209 v0.AddArg(v1) 7210 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7211 v2.AddArg(y) 7212 v0.AddArg(v2) 7213 v.AddArg(v0) 7214 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7215 v3.AuxInt = 0 7216 v.AddArg(v3) 7217 return true 7218 } 7219 } 7220 func rewriteValueMIPS64_OpNeq32_0(v *Value) bool { 7221 b := v.Block 7222 _ = b 7223 typ := &b.Func.Config.Types 7224 _ = typ 7225 // match: (Neq32 x y) 7226 // cond: 7227 // result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0])) 7228 for { 7229 _ = v.Args[1] 7230 x := v.Args[0] 7231 y := v.Args[1] 7232 v.reset(OpMIPS64SGTU) 7233 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7234 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7235 v1.AddArg(x) 7236 v0.AddArg(v1) 7237 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7238 v2.AddArg(y) 7239 v0.AddArg(v2) 7240 v.AddArg(v0) 7241 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7242 v3.AuxInt = 0 7243 v.AddArg(v3) 7244 return 
true 7245 } 7246 } 7247 func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool { 7248 b := v.Block 7249 _ = b 7250 // match: (Neq32F x y) 7251 // cond: 7252 // result: (FPFlagFalse (CMPEQF x y)) 7253 for { 7254 _ = v.Args[1] 7255 x := v.Args[0] 7256 y := v.Args[1] 7257 v.reset(OpMIPS64FPFlagFalse) 7258 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags) 7259 v0.AddArg(x) 7260 v0.AddArg(y) 7261 v.AddArg(v0) 7262 return true 7263 } 7264 } 7265 func rewriteValueMIPS64_OpNeq64_0(v *Value) bool { 7266 b := v.Block 7267 _ = b 7268 typ := &b.Func.Config.Types 7269 _ = typ 7270 // match: (Neq64 x y) 7271 // cond: 7272 // result: (SGTU (XOR x y) (MOVVconst [0])) 7273 for { 7274 _ = v.Args[1] 7275 x := v.Args[0] 7276 y := v.Args[1] 7277 v.reset(OpMIPS64SGTU) 7278 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7279 v0.AddArg(x) 7280 v0.AddArg(y) 7281 v.AddArg(v0) 7282 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7283 v1.AuxInt = 0 7284 v.AddArg(v1) 7285 return true 7286 } 7287 } 7288 func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool { 7289 b := v.Block 7290 _ = b 7291 // match: (Neq64F x y) 7292 // cond: 7293 // result: (FPFlagFalse (CMPEQD x y)) 7294 for { 7295 _ = v.Args[1] 7296 x := v.Args[0] 7297 y := v.Args[1] 7298 v.reset(OpMIPS64FPFlagFalse) 7299 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags) 7300 v0.AddArg(x) 7301 v0.AddArg(y) 7302 v.AddArg(v0) 7303 return true 7304 } 7305 } 7306 func rewriteValueMIPS64_OpNeq8_0(v *Value) bool { 7307 b := v.Block 7308 _ = b 7309 typ := &b.Func.Config.Types 7310 _ = typ 7311 // match: (Neq8 x y) 7312 // cond: 7313 // result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0])) 7314 for { 7315 _ = v.Args[1] 7316 x := v.Args[0] 7317 y := v.Args[1] 7318 v.reset(OpMIPS64SGTU) 7319 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7320 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7321 v1.AddArg(x) 7322 v0.AddArg(v1) 7323 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7324 v2.AddArg(y) 7325 
v0.AddArg(v2) 7326 v.AddArg(v0) 7327 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7328 v3.AuxInt = 0 7329 v.AddArg(v3) 7330 return true 7331 } 7332 } 7333 func rewriteValueMIPS64_OpNeqB_0(v *Value) bool { 7334 // match: (NeqB x y) 7335 // cond: 7336 // result: (XOR x y) 7337 for { 7338 _ = v.Args[1] 7339 x := v.Args[0] 7340 y := v.Args[1] 7341 v.reset(OpMIPS64XOR) 7342 v.AddArg(x) 7343 v.AddArg(y) 7344 return true 7345 } 7346 } 7347 func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool { 7348 b := v.Block 7349 _ = b 7350 typ := &b.Func.Config.Types 7351 _ = typ 7352 // match: (NeqPtr x y) 7353 // cond: 7354 // result: (SGTU (XOR x y) (MOVVconst [0])) 7355 for { 7356 _ = v.Args[1] 7357 x := v.Args[0] 7358 y := v.Args[1] 7359 v.reset(OpMIPS64SGTU) 7360 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7361 v0.AddArg(x) 7362 v0.AddArg(y) 7363 v.AddArg(v0) 7364 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7365 v1.AuxInt = 0 7366 v.AddArg(v1) 7367 return true 7368 } 7369 } 7370 func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool { 7371 // match: (NilCheck ptr mem) 7372 // cond: 7373 // result: (LoweredNilCheck ptr mem) 7374 for { 7375 _ = v.Args[1] 7376 ptr := v.Args[0] 7377 mem := v.Args[1] 7378 v.reset(OpMIPS64LoweredNilCheck) 7379 v.AddArg(ptr) 7380 v.AddArg(mem) 7381 return true 7382 } 7383 } 7384 func rewriteValueMIPS64_OpNot_0(v *Value) bool { 7385 // match: (Not x) 7386 // cond: 7387 // result: (XORconst [1] x) 7388 for { 7389 x := v.Args[0] 7390 v.reset(OpMIPS64XORconst) 7391 v.AuxInt = 1 7392 v.AddArg(x) 7393 return true 7394 } 7395 } 7396 func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool { 7397 // match: (OffPtr [off] ptr:(SP)) 7398 // cond: 7399 // result: (MOVVaddr [off] ptr) 7400 for { 7401 off := v.AuxInt 7402 ptr := v.Args[0] 7403 if ptr.Op != OpSP { 7404 break 7405 } 7406 v.reset(OpMIPS64MOVVaddr) 7407 v.AuxInt = off 7408 v.AddArg(ptr) 7409 return true 7410 } 7411 // match: (OffPtr [off] ptr) 7412 // cond: 7413 // result: (ADDVconst 
[off] ptr) 7414 for { 7415 off := v.AuxInt 7416 ptr := v.Args[0] 7417 v.reset(OpMIPS64ADDVconst) 7418 v.AuxInt = off 7419 v.AddArg(ptr) 7420 return true 7421 } 7422 } 7423 func rewriteValueMIPS64_OpOr16_0(v *Value) bool { 7424 // match: (Or16 x y) 7425 // cond: 7426 // result: (OR x y) 7427 for { 7428 _ = v.Args[1] 7429 x := v.Args[0] 7430 y := v.Args[1] 7431 v.reset(OpMIPS64OR) 7432 v.AddArg(x) 7433 v.AddArg(y) 7434 return true 7435 } 7436 } 7437 func rewriteValueMIPS64_OpOr32_0(v *Value) bool { 7438 // match: (Or32 x y) 7439 // cond: 7440 // result: (OR x y) 7441 for { 7442 _ = v.Args[1] 7443 x := v.Args[0] 7444 y := v.Args[1] 7445 v.reset(OpMIPS64OR) 7446 v.AddArg(x) 7447 v.AddArg(y) 7448 return true 7449 } 7450 } 7451 func rewriteValueMIPS64_OpOr64_0(v *Value) bool { 7452 // match: (Or64 x y) 7453 // cond: 7454 // result: (OR x y) 7455 for { 7456 _ = v.Args[1] 7457 x := v.Args[0] 7458 y := v.Args[1] 7459 v.reset(OpMIPS64OR) 7460 v.AddArg(x) 7461 v.AddArg(y) 7462 return true 7463 } 7464 } 7465 func rewriteValueMIPS64_OpOr8_0(v *Value) bool { 7466 // match: (Or8 x y) 7467 // cond: 7468 // result: (OR x y) 7469 for { 7470 _ = v.Args[1] 7471 x := v.Args[0] 7472 y := v.Args[1] 7473 v.reset(OpMIPS64OR) 7474 v.AddArg(x) 7475 v.AddArg(y) 7476 return true 7477 } 7478 } 7479 func rewriteValueMIPS64_OpOrB_0(v *Value) bool { 7480 // match: (OrB x y) 7481 // cond: 7482 // result: (OR x y) 7483 for { 7484 _ = v.Args[1] 7485 x := v.Args[0] 7486 y := v.Args[1] 7487 v.reset(OpMIPS64OR) 7488 v.AddArg(x) 7489 v.AddArg(y) 7490 return true 7491 } 7492 } 7493 func rewriteValueMIPS64_OpRound32F_0(v *Value) bool { 7494 // match: (Round32F x) 7495 // cond: 7496 // result: x 7497 for { 7498 x := v.Args[0] 7499 v.reset(OpCopy) 7500 v.Type = x.Type 7501 v.AddArg(x) 7502 return true 7503 } 7504 } 7505 func rewriteValueMIPS64_OpRound64F_0(v *Value) bool { 7506 // match: (Round64F x) 7507 // cond: 7508 // result: x 7509 for { 7510 x := v.Args[0] 7511 v.reset(OpCopy) 7512 v.Type = x.Type 7513 
v.AddArg(x) 7514 return true 7515 } 7516 } 7517 func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool { 7518 b := v.Block 7519 _ = b 7520 typ := &b.Func.Config.Types 7521 _ = typ 7522 // match: (Rsh16Ux16 <t> x y) 7523 // cond: 7524 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y))) 7525 for { 7526 t := v.Type 7527 _ = v.Args[1] 7528 x := v.Args[0] 7529 y := v.Args[1] 7530 v.reset(OpMIPS64AND) 7531 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7532 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7533 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7534 v2.AuxInt = 64 7535 v1.AddArg(v2) 7536 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7537 v3.AddArg(y) 7538 v1.AddArg(v3) 7539 v0.AddArg(v1) 7540 v.AddArg(v0) 7541 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7542 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7543 v5.AddArg(x) 7544 v4.AddArg(v5) 7545 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7546 v6.AddArg(y) 7547 v4.AddArg(v6) 7548 v.AddArg(v4) 7549 return true 7550 } 7551 } 7552 func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool { 7553 b := v.Block 7554 _ = b 7555 typ := &b.Func.Config.Types 7556 _ = typ 7557 // match: (Rsh16Ux32 <t> x y) 7558 // cond: 7559 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y))) 7560 for { 7561 t := v.Type 7562 _ = v.Args[1] 7563 x := v.Args[0] 7564 y := v.Args[1] 7565 v.reset(OpMIPS64AND) 7566 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7567 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7568 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7569 v2.AuxInt = 64 7570 v1.AddArg(v2) 7571 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7572 v3.AddArg(y) 7573 v1.AddArg(v3) 7574 v0.AddArg(v1) 7575 v.AddArg(v0) 7576 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7577 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7578 v5.AddArg(x) 7579 v4.AddArg(v5) 7580 v6 := 
b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7581 v6.AddArg(y) 7582 v4.AddArg(v6) 7583 v.AddArg(v4) 7584 return true 7585 } 7586 } 7587 func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool { 7588 b := v.Block 7589 _ = b 7590 typ := &b.Func.Config.Types 7591 _ = typ 7592 // match: (Rsh16Ux64 <t> x y) 7593 // cond: 7594 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y)) 7595 for { 7596 t := v.Type 7597 _ = v.Args[1] 7598 x := v.Args[0] 7599 y := v.Args[1] 7600 v.reset(OpMIPS64AND) 7601 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7602 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7603 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7604 v2.AuxInt = 64 7605 v1.AddArg(v2) 7606 v1.AddArg(y) 7607 v0.AddArg(v1) 7608 v.AddArg(v0) 7609 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7610 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7611 v4.AddArg(x) 7612 v3.AddArg(v4) 7613 v3.AddArg(y) 7614 v.AddArg(v3) 7615 return true 7616 } 7617 } 7618 func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool { 7619 b := v.Block 7620 _ = b 7621 typ := &b.Func.Config.Types 7622 _ = typ 7623 // match: (Rsh16Ux8 <t> x y) 7624 // cond: 7625 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y))) 7626 for { 7627 t := v.Type 7628 _ = v.Args[1] 7629 x := v.Args[0] 7630 y := v.Args[1] 7631 v.reset(OpMIPS64AND) 7632 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7633 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7634 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7635 v2.AuxInt = 64 7636 v1.AddArg(v2) 7637 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7638 v3.AddArg(y) 7639 v1.AddArg(v3) 7640 v0.AddArg(v1) 7641 v.AddArg(v0) 7642 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7643 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7644 v5.AddArg(x) 7645 v4.AddArg(v5) 7646 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7647 v6.AddArg(y) 7648 v4.AddArg(v6) 7649 v.AddArg(v4) 
7650 return true 7651 } 7652 } 7653 func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool { 7654 b := v.Block 7655 _ = b 7656 typ := &b.Func.Config.Types 7657 _ = typ 7658 // match: (Rsh16x16 <t> x y) 7659 // cond: 7660 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y))) 7661 for { 7662 t := v.Type 7663 _ = v.Args[1] 7664 x := v.Args[0] 7665 y := v.Args[1] 7666 v.reset(OpMIPS64SRAV) 7667 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 7668 v0.AddArg(x) 7669 v.AddArg(v0) 7670 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 7671 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7672 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7673 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7674 v4.AddArg(y) 7675 v3.AddArg(v4) 7676 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7677 v5.AuxInt = 63 7678 v3.AddArg(v5) 7679 v2.AddArg(v3) 7680 v1.AddArg(v2) 7681 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7682 v6.AddArg(y) 7683 v1.AddArg(v6) 7684 v.AddArg(v1) 7685 return true 7686 } 7687 } 7688 func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool { 7689 b := v.Block 7690 _ = b 7691 typ := &b.Func.Config.Types 7692 _ = typ 7693 // match: (Rsh16x32 <t> x y) 7694 // cond: 7695 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y))) 7696 for { 7697 t := v.Type 7698 _ = v.Args[1] 7699 x := v.Args[0] 7700 y := v.Args[1] 7701 v.reset(OpMIPS64SRAV) 7702 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 7703 v0.AddArg(x) 7704 v.AddArg(v0) 7705 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 7706 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7707 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7708 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7709 v4.AddArg(y) 7710 v3.AddArg(v4) 7711 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7712 v5.AuxInt = 63 7713 v3.AddArg(v5) 7714 v2.AddArg(v3) 7715 v1.AddArg(v2) 7716 v6 := b.NewValue0(v.Pos, 
OpZeroExt32to64, typ.UInt64) 7717 v6.AddArg(y) 7718 v1.AddArg(v6) 7719 v.AddArg(v1) 7720 return true 7721 } 7722 } 7723 func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool { 7724 b := v.Block 7725 _ = b 7726 typ := &b.Func.Config.Types 7727 _ = typ 7728 // match: (Rsh16x64 <t> x y) 7729 // cond: 7730 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y)) 7731 for { 7732 t := v.Type 7733 _ = v.Args[1] 7734 x := v.Args[0] 7735 y := v.Args[1] 7736 v.reset(OpMIPS64SRAV) 7737 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 7738 v0.AddArg(x) 7739 v.AddArg(v0) 7740 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 7741 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7742 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7743 v3.AddArg(y) 7744 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7745 v4.AuxInt = 63 7746 v3.AddArg(v4) 7747 v2.AddArg(v3) 7748 v1.AddArg(v2) 7749 v1.AddArg(y) 7750 v.AddArg(v1) 7751 return true 7752 } 7753 } 7754 func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool { 7755 b := v.Block 7756 _ = b 7757 typ := &b.Func.Config.Types 7758 _ = typ 7759 // match: (Rsh16x8 <t> x y) 7760 // cond: 7761 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y))) 7762 for { 7763 t := v.Type 7764 _ = v.Args[1] 7765 x := v.Args[0] 7766 y := v.Args[1] 7767 v.reset(OpMIPS64SRAV) 7768 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 7769 v0.AddArg(x) 7770 v.AddArg(v0) 7771 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 7772 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7773 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7774 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7775 v4.AddArg(y) 7776 v3.AddArg(v4) 7777 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7778 v5.AuxInt = 63 7779 v3.AddArg(v5) 7780 v2.AddArg(v3) 7781 v1.AddArg(v2) 7782 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7783 v6.AddArg(y) 7784 v1.AddArg(v6) 7785 v.AddArg(v1) 7786 return true 7787 } 7788 } 
7789 func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool { 7790 b := v.Block 7791 _ = b 7792 typ := &b.Func.Config.Types 7793 _ = typ 7794 // match: (Rsh32Ux16 <t> x y) 7795 // cond: 7796 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y))) 7797 for { 7798 t := v.Type 7799 _ = v.Args[1] 7800 x := v.Args[0] 7801 y := v.Args[1] 7802 v.reset(OpMIPS64AND) 7803 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7804 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7805 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7806 v2.AuxInt = 64 7807 v1.AddArg(v2) 7808 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7809 v3.AddArg(y) 7810 v1.AddArg(v3) 7811 v0.AddArg(v1) 7812 v.AddArg(v0) 7813 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7814 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7815 v5.AddArg(x) 7816 v4.AddArg(v5) 7817 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7818 v6.AddArg(y) 7819 v4.AddArg(v6) 7820 v.AddArg(v4) 7821 return true 7822 } 7823 } 7824 func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool { 7825 b := v.Block 7826 _ = b 7827 typ := &b.Func.Config.Types 7828 _ = typ 7829 // match: (Rsh32Ux32 <t> x y) 7830 // cond: 7831 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y))) 7832 for { 7833 t := v.Type 7834 _ = v.Args[1] 7835 x := v.Args[0] 7836 y := v.Args[1] 7837 v.reset(OpMIPS64AND) 7838 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7839 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7840 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7841 v2.AuxInt = 64 7842 v1.AddArg(v2) 7843 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7844 v3.AddArg(y) 7845 v1.AddArg(v3) 7846 v0.AddArg(v1) 7847 v.AddArg(v0) 7848 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7849 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7850 v5.AddArg(x) 7851 v4.AddArg(v5) 7852 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, 
typ.UInt64) 7853 v6.AddArg(y) 7854 v4.AddArg(v6) 7855 v.AddArg(v4) 7856 return true 7857 } 7858 } 7859 func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool { 7860 b := v.Block 7861 _ = b 7862 typ := &b.Func.Config.Types 7863 _ = typ 7864 // match: (Rsh32Ux64 <t> x y) 7865 // cond: 7866 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y)) 7867 for { 7868 t := v.Type 7869 _ = v.Args[1] 7870 x := v.Args[0] 7871 y := v.Args[1] 7872 v.reset(OpMIPS64AND) 7873 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7874 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7875 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7876 v2.AuxInt = 64 7877 v1.AddArg(v2) 7878 v1.AddArg(y) 7879 v0.AddArg(v1) 7880 v.AddArg(v0) 7881 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7882 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7883 v4.AddArg(x) 7884 v3.AddArg(v4) 7885 v3.AddArg(y) 7886 v.AddArg(v3) 7887 return true 7888 } 7889 } 7890 func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool { 7891 b := v.Block 7892 _ = b 7893 typ := &b.Func.Config.Types 7894 _ = typ 7895 // match: (Rsh32Ux8 <t> x y) 7896 // cond: 7897 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y))) 7898 for { 7899 t := v.Type 7900 _ = v.Args[1] 7901 x := v.Args[0] 7902 y := v.Args[1] 7903 v.reset(OpMIPS64AND) 7904 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7905 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7906 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 7907 v2.AuxInt = 64 7908 v1.AddArg(v2) 7909 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7910 v3.AddArg(y) 7911 v1.AddArg(v3) 7912 v0.AddArg(v1) 7913 v.AddArg(v0) 7914 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7915 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7916 v5.AddArg(x) 7917 v4.AddArg(v5) 7918 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7919 v6.AddArg(y) 7920 v4.AddArg(v6) 7921 v.AddArg(v4) 7922 return true 7923 } 7924 } 7925 
// rewriteValueMIPS64_OpRsh32x16_0 lowers the generic signed shift
// (Rsh32x16 x y) to MIPS64 ops. The shift amount fed to SRAV is
// (OR (NEGV (SGTU y' 63)) y'): when the (zero-extended) amount
// exceeds 63 the OR mask is all ones, clamping the effective shift so
// the arithmetic shift fills with the sign bit — the Go-level result
// for an over-large signed shift.
// Generated code: keep in sync with gen/MIPS64.rules; do not hand-edit logic.
func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x16 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		// Clamp: OR in -(y' >u 63) so amounts > 63 become all ones.
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpRsh32x32_0 lowers (Rsh32x32 x y); identical
// clamping scheme with the shift amount zero-extended from 32 bits.
func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x32 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpRsh32x64_0 lowers (Rsh32x64 x y); the shift
// amount y is already 64-bit so no extension is emitted.
func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x64 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v3.AddArg(y)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 63
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValueMIPS64_OpRsh32x8_0 lowers (Rsh32x8 x y); same clamping
// scheme with the shift amount zero-extended from 8 bits.
func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool {
	b := v.Block
	_ = b
	typ := &b.Func.Config.Types
	_ = typ
	// match: (Rsh32x8 <t> x y)
	// cond:
	// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y)))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpMIPS64SRAV)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
		v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
		v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 63
		v3.AddArg(v5)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v6.AddArg(y)
		v1.AddArg(v6)
		v.AddArg(v1)
		return true
	}
}
func
rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool { 8062 b := v.Block 8063 _ = b 8064 typ := &b.Func.Config.Types 8065 _ = typ 8066 // match: (Rsh64Ux16 <t> x y) 8067 // cond: 8068 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y))) 8069 for { 8070 t := v.Type 8071 _ = v.Args[1] 8072 x := v.Args[0] 8073 y := v.Args[1] 8074 v.reset(OpMIPS64AND) 8075 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8076 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8077 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8078 v2.AuxInt = 64 8079 v1.AddArg(v2) 8080 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8081 v3.AddArg(y) 8082 v1.AddArg(v3) 8083 v0.AddArg(v1) 8084 v.AddArg(v0) 8085 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8086 v4.AddArg(x) 8087 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8088 v5.AddArg(y) 8089 v4.AddArg(v5) 8090 v.AddArg(v4) 8091 return true 8092 } 8093 } 8094 func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool { 8095 b := v.Block 8096 _ = b 8097 typ := &b.Func.Config.Types 8098 _ = typ 8099 // match: (Rsh64Ux32 <t> x y) 8100 // cond: 8101 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y))) 8102 for { 8103 t := v.Type 8104 _ = v.Args[1] 8105 x := v.Args[0] 8106 y := v.Args[1] 8107 v.reset(OpMIPS64AND) 8108 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8109 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8110 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8111 v2.AuxInt = 64 8112 v1.AddArg(v2) 8113 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8114 v3.AddArg(y) 8115 v1.AddArg(v3) 8116 v0.AddArg(v1) 8117 v.AddArg(v0) 8118 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8119 v4.AddArg(x) 8120 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8121 v5.AddArg(y) 8122 v4.AddArg(v5) 8123 v.AddArg(v4) 8124 return true 8125 } 8126 } 8127 func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool { 8128 b := v.Block 8129 _ = b 8130 typ := 
&b.Func.Config.Types 8131 _ = typ 8132 // match: (Rsh64Ux64 <t> x y) 8133 // cond: 8134 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> x y)) 8135 for { 8136 t := v.Type 8137 _ = v.Args[1] 8138 x := v.Args[0] 8139 y := v.Args[1] 8140 v.reset(OpMIPS64AND) 8141 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8142 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8143 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8144 v2.AuxInt = 64 8145 v1.AddArg(v2) 8146 v1.AddArg(y) 8147 v0.AddArg(v1) 8148 v.AddArg(v0) 8149 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8150 v3.AddArg(x) 8151 v3.AddArg(y) 8152 v.AddArg(v3) 8153 return true 8154 } 8155 } 8156 func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool { 8157 b := v.Block 8158 _ = b 8159 typ := &b.Func.Config.Types 8160 _ = typ 8161 // match: (Rsh64Ux8 <t> x y) 8162 // cond: 8163 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y))) 8164 for { 8165 t := v.Type 8166 _ = v.Args[1] 8167 x := v.Args[0] 8168 y := v.Args[1] 8169 v.reset(OpMIPS64AND) 8170 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8171 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8172 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8173 v2.AuxInt = 64 8174 v1.AddArg(v2) 8175 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8176 v3.AddArg(y) 8177 v1.AddArg(v3) 8178 v0.AddArg(v1) 8179 v.AddArg(v0) 8180 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8181 v4.AddArg(x) 8182 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8183 v5.AddArg(y) 8184 v4.AddArg(v5) 8185 v.AddArg(v4) 8186 return true 8187 } 8188 } 8189 func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool { 8190 b := v.Block 8191 _ = b 8192 typ := &b.Func.Config.Types 8193 _ = typ 8194 // match: (Rsh64x16 <t> x y) 8195 // cond: 8196 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8197 for { 8198 t := v.Type 8199 _ = v.Args[1] 8200 x := v.Args[0] 8201 y := v.Args[1] 8202 
v.reset(OpMIPS64SRAV) 8203 v.AddArg(x) 8204 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8205 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8206 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8207 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8208 v3.AddArg(y) 8209 v2.AddArg(v3) 8210 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8211 v4.AuxInt = 63 8212 v2.AddArg(v4) 8213 v1.AddArg(v2) 8214 v0.AddArg(v1) 8215 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8216 v5.AddArg(y) 8217 v0.AddArg(v5) 8218 v.AddArg(v0) 8219 return true 8220 } 8221 } 8222 func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool { 8223 b := v.Block 8224 _ = b 8225 typ := &b.Func.Config.Types 8226 _ = typ 8227 // match: (Rsh64x32 <t> x y) 8228 // cond: 8229 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8230 for { 8231 t := v.Type 8232 _ = v.Args[1] 8233 x := v.Args[0] 8234 y := v.Args[1] 8235 v.reset(OpMIPS64SRAV) 8236 v.AddArg(x) 8237 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8238 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8239 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8240 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8241 v3.AddArg(y) 8242 v2.AddArg(v3) 8243 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8244 v4.AuxInt = 63 8245 v2.AddArg(v4) 8246 v1.AddArg(v2) 8247 v0.AddArg(v1) 8248 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8249 v5.AddArg(y) 8250 v0.AddArg(v5) 8251 v.AddArg(v0) 8252 return true 8253 } 8254 } 8255 func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool { 8256 b := v.Block 8257 _ = b 8258 typ := &b.Func.Config.Types 8259 _ = typ 8260 // match: (Rsh64x64 <t> x y) 8261 // cond: 8262 // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y)) 8263 for { 8264 t := v.Type 8265 _ = v.Args[1] 8266 x := v.Args[0] 8267 y := v.Args[1] 8268 v.reset(OpMIPS64SRAV) 8269 v.AddArg(x) 8270 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8271 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8272 v2 
:= b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8273 v2.AddArg(y) 8274 v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8275 v3.AuxInt = 63 8276 v2.AddArg(v3) 8277 v1.AddArg(v2) 8278 v0.AddArg(v1) 8279 v0.AddArg(y) 8280 v.AddArg(v0) 8281 return true 8282 } 8283 } 8284 func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool { 8285 b := v.Block 8286 _ = b 8287 typ := &b.Func.Config.Types 8288 _ = typ 8289 // match: (Rsh64x8 <t> x y) 8290 // cond: 8291 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8292 for { 8293 t := v.Type 8294 _ = v.Args[1] 8295 x := v.Args[0] 8296 y := v.Args[1] 8297 v.reset(OpMIPS64SRAV) 8298 v.AddArg(x) 8299 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8300 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8301 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8302 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8303 v3.AddArg(y) 8304 v2.AddArg(v3) 8305 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8306 v4.AuxInt = 63 8307 v2.AddArg(v4) 8308 v1.AddArg(v2) 8309 v0.AddArg(v1) 8310 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8311 v5.AddArg(y) 8312 v0.AddArg(v5) 8313 v.AddArg(v0) 8314 return true 8315 } 8316 } 8317 func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool { 8318 b := v.Block 8319 _ = b 8320 typ := &b.Func.Config.Types 8321 _ = typ 8322 // match: (Rsh8Ux16 <t> x y) 8323 // cond: 8324 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y))) 8325 for { 8326 t := v.Type 8327 _ = v.Args[1] 8328 x := v.Args[0] 8329 y := v.Args[1] 8330 v.reset(OpMIPS64AND) 8331 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8332 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8333 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8334 v2.AuxInt = 64 8335 v1.AddArg(v2) 8336 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8337 v3.AddArg(y) 8338 v1.AddArg(v3) 8339 v0.AddArg(v1) 8340 v.AddArg(v0) 8341 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, 
t) 8342 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8343 v5.AddArg(x) 8344 v4.AddArg(v5) 8345 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8346 v6.AddArg(y) 8347 v4.AddArg(v6) 8348 v.AddArg(v4) 8349 return true 8350 } 8351 } 8352 func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool { 8353 b := v.Block 8354 _ = b 8355 typ := &b.Func.Config.Types 8356 _ = typ 8357 // match: (Rsh8Ux32 <t> x y) 8358 // cond: 8359 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y))) 8360 for { 8361 t := v.Type 8362 _ = v.Args[1] 8363 x := v.Args[0] 8364 y := v.Args[1] 8365 v.reset(OpMIPS64AND) 8366 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8367 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8368 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8369 v2.AuxInt = 64 8370 v1.AddArg(v2) 8371 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8372 v3.AddArg(y) 8373 v1.AddArg(v3) 8374 v0.AddArg(v1) 8375 v.AddArg(v0) 8376 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8377 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8378 v5.AddArg(x) 8379 v4.AddArg(v5) 8380 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8381 v6.AddArg(y) 8382 v4.AddArg(v6) 8383 v.AddArg(v4) 8384 return true 8385 } 8386 } 8387 func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool { 8388 b := v.Block 8389 _ = b 8390 typ := &b.Func.Config.Types 8391 _ = typ 8392 // match: (Rsh8Ux64 <t> x y) 8393 // cond: 8394 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y)) 8395 for { 8396 t := v.Type 8397 _ = v.Args[1] 8398 x := v.Args[0] 8399 y := v.Args[1] 8400 v.reset(OpMIPS64AND) 8401 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8402 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8403 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8404 v2.AuxInt = 64 8405 v1.AddArg(v2) 8406 v1.AddArg(y) 8407 v0.AddArg(v1) 8408 v.AddArg(v0) 8409 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8410 v4 := 
b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8411 v4.AddArg(x) 8412 v3.AddArg(v4) 8413 v3.AddArg(y) 8414 v.AddArg(v3) 8415 return true 8416 } 8417 } 8418 func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool { 8419 b := v.Block 8420 _ = b 8421 typ := &b.Func.Config.Types 8422 _ = typ 8423 // match: (Rsh8Ux8 <t> x y) 8424 // cond: 8425 // result: (AND (NEGV <t> (SGTU (Const64 <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y))) 8426 for { 8427 t := v.Type 8428 _ = v.Args[1] 8429 x := v.Args[0] 8430 y := v.Args[1] 8431 v.reset(OpMIPS64AND) 8432 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8433 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8434 v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8435 v2.AuxInt = 64 8436 v1.AddArg(v2) 8437 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8438 v3.AddArg(y) 8439 v1.AddArg(v3) 8440 v0.AddArg(v1) 8441 v.AddArg(v0) 8442 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8443 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8444 v5.AddArg(x) 8445 v4.AddArg(v5) 8446 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8447 v6.AddArg(y) 8448 v4.AddArg(v6) 8449 v.AddArg(v4) 8450 return true 8451 } 8452 } 8453 func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool { 8454 b := v.Block 8455 _ = b 8456 typ := &b.Func.Config.Types 8457 _ = typ 8458 // match: (Rsh8x16 <t> x y) 8459 // cond: 8460 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8461 for { 8462 t := v.Type 8463 _ = v.Args[1] 8464 x := v.Args[0] 8465 y := v.Args[1] 8466 v.reset(OpMIPS64SRAV) 8467 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8468 v0.AddArg(x) 8469 v.AddArg(v0) 8470 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8471 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8472 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8473 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8474 v4.AddArg(y) 8475 v3.AddArg(v4) 8476 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 
8477 v5.AuxInt = 63 8478 v3.AddArg(v5) 8479 v2.AddArg(v3) 8480 v1.AddArg(v2) 8481 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8482 v6.AddArg(y) 8483 v1.AddArg(v6) 8484 v.AddArg(v1) 8485 return true 8486 } 8487 } 8488 func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool { 8489 b := v.Block 8490 _ = b 8491 typ := &b.Func.Config.Types 8492 _ = typ 8493 // match: (Rsh8x32 <t> x y) 8494 // cond: 8495 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8496 for { 8497 t := v.Type 8498 _ = v.Args[1] 8499 x := v.Args[0] 8500 y := v.Args[1] 8501 v.reset(OpMIPS64SRAV) 8502 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8503 v0.AddArg(x) 8504 v.AddArg(v0) 8505 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8506 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8507 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8508 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8509 v4.AddArg(y) 8510 v3.AddArg(v4) 8511 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8512 v5.AuxInt = 63 8513 v3.AddArg(v5) 8514 v2.AddArg(v3) 8515 v1.AddArg(v2) 8516 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8517 v6.AddArg(y) 8518 v1.AddArg(v6) 8519 v.AddArg(v1) 8520 return true 8521 } 8522 } 8523 func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool { 8524 b := v.Block 8525 _ = b 8526 typ := &b.Func.Config.Types 8527 _ = typ 8528 // match: (Rsh8x64 <t> x y) 8529 // cond: 8530 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (Const64 <typ.UInt64> [63]))) y)) 8531 for { 8532 t := v.Type 8533 _ = v.Args[1] 8534 x := v.Args[0] 8535 y := v.Args[1] 8536 v.reset(OpMIPS64SRAV) 8537 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8538 v0.AddArg(x) 8539 v.AddArg(v0) 8540 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8541 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8542 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8543 v3.AddArg(y) 8544 v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8545 v4.AuxInt = 63 8546 v3.AddArg(v4) 
8547 v2.AddArg(v3) 8548 v1.AddArg(v2) 8549 v1.AddArg(y) 8550 v.AddArg(v1) 8551 return true 8552 } 8553 } 8554 func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool { 8555 b := v.Block 8556 _ = b 8557 typ := &b.Func.Config.Types 8558 _ = typ 8559 // match: (Rsh8x8 <t> x y) 8560 // cond: 8561 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (Const64 <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8562 for { 8563 t := v.Type 8564 _ = v.Args[1] 8565 x := v.Args[0] 8566 y := v.Args[1] 8567 v.reset(OpMIPS64SRAV) 8568 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8569 v0.AddArg(x) 8570 v.AddArg(v0) 8571 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8572 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8573 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8574 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8575 v4.AddArg(y) 8576 v3.AddArg(v4) 8577 v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) 8578 v5.AuxInt = 63 8579 v3.AddArg(v5) 8580 v2.AddArg(v3) 8581 v1.AddArg(v2) 8582 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8583 v6.AddArg(y) 8584 v1.AddArg(v6) 8585 v.AddArg(v1) 8586 return true 8587 } 8588 } 8589 func rewriteValueMIPS64_OpSelect0_0(v *Value) bool { 8590 // match: (Select0 (DIVVU _ (MOVVconst [1]))) 8591 // cond: 8592 // result: (MOVVconst [0]) 8593 for { 8594 v_0 := v.Args[0] 8595 if v_0.Op != OpMIPS64DIVVU { 8596 break 8597 } 8598 _ = v_0.Args[1] 8599 v_0_1 := v_0.Args[1] 8600 if v_0_1.Op != OpMIPS64MOVVconst { 8601 break 8602 } 8603 if v_0_1.AuxInt != 1 { 8604 break 8605 } 8606 v.reset(OpMIPS64MOVVconst) 8607 v.AuxInt = 0 8608 return true 8609 } 8610 // match: (Select0 (DIVVU x (MOVVconst [c]))) 8611 // cond: isPowerOfTwo(c) 8612 // result: (ANDconst [c-1] x) 8613 for { 8614 v_0 := v.Args[0] 8615 if v_0.Op != OpMIPS64DIVVU { 8616 break 8617 } 8618 _ = v_0.Args[1] 8619 x := v_0.Args[0] 8620 v_0_1 := v_0.Args[1] 8621 if v_0_1.Op != OpMIPS64MOVVconst { 8622 break 8623 } 8624 c := v_0_1.AuxInt 8625 if !(isPowerOfTwo(c)) { 8626 break 8627 } 
8628 v.reset(OpMIPS64ANDconst) 8629 v.AuxInt = c - 1 8630 v.AddArg(x) 8631 return true 8632 } 8633 // match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 8634 // cond: 8635 // result: (MOVVconst [int64(c)%int64(d)]) 8636 for { 8637 v_0 := v.Args[0] 8638 if v_0.Op != OpMIPS64DIVV { 8639 break 8640 } 8641 _ = v_0.Args[1] 8642 v_0_0 := v_0.Args[0] 8643 if v_0_0.Op != OpMIPS64MOVVconst { 8644 break 8645 } 8646 c := v_0_0.AuxInt 8647 v_0_1 := v_0.Args[1] 8648 if v_0_1.Op != OpMIPS64MOVVconst { 8649 break 8650 } 8651 d := v_0_1.AuxInt 8652 v.reset(OpMIPS64MOVVconst) 8653 v.AuxInt = int64(c) % int64(d) 8654 return true 8655 } 8656 // match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 8657 // cond: 8658 // result: (MOVVconst [int64(uint64(c)%uint64(d))]) 8659 for { 8660 v_0 := v.Args[0] 8661 if v_0.Op != OpMIPS64DIVVU { 8662 break 8663 } 8664 _ = v_0.Args[1] 8665 v_0_0 := v_0.Args[0] 8666 if v_0_0.Op != OpMIPS64MOVVconst { 8667 break 8668 } 8669 c := v_0_0.AuxInt 8670 v_0_1 := v_0.Args[1] 8671 if v_0_1.Op != OpMIPS64MOVVconst { 8672 break 8673 } 8674 d := v_0_1.AuxInt 8675 v.reset(OpMIPS64MOVVconst) 8676 v.AuxInt = int64(uint64(c) % uint64(d)) 8677 return true 8678 } 8679 return false 8680 } 8681 func rewriteValueMIPS64_OpSelect1_0(v *Value) bool { 8682 // match: (Select1 (MULVU x (MOVVconst [-1]))) 8683 // cond: 8684 // result: (NEGV x) 8685 for { 8686 v_0 := v.Args[0] 8687 if v_0.Op != OpMIPS64MULVU { 8688 break 8689 } 8690 _ = v_0.Args[1] 8691 x := v_0.Args[0] 8692 v_0_1 := v_0.Args[1] 8693 if v_0_1.Op != OpMIPS64MOVVconst { 8694 break 8695 } 8696 if v_0_1.AuxInt != -1 { 8697 break 8698 } 8699 v.reset(OpMIPS64NEGV) 8700 v.AddArg(x) 8701 return true 8702 } 8703 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 8704 // cond: 8705 // result: (NEGV x) 8706 for { 8707 v_0 := v.Args[0] 8708 if v_0.Op != OpMIPS64MULVU { 8709 break 8710 } 8711 _ = v_0.Args[1] 8712 v_0_0 := v_0.Args[0] 8713 if v_0_0.Op != OpMIPS64MOVVconst { 8714 break 8715 } 8716 if v_0_0.AuxInt != -1 { 
8717 break 8718 } 8719 x := v_0.Args[1] 8720 v.reset(OpMIPS64NEGV) 8721 v.AddArg(x) 8722 return true 8723 } 8724 // match: (Select1 (MULVU _ (MOVVconst [0]))) 8725 // cond: 8726 // result: (MOVVconst [0]) 8727 for { 8728 v_0 := v.Args[0] 8729 if v_0.Op != OpMIPS64MULVU { 8730 break 8731 } 8732 _ = v_0.Args[1] 8733 v_0_1 := v_0.Args[1] 8734 if v_0_1.Op != OpMIPS64MOVVconst { 8735 break 8736 } 8737 if v_0_1.AuxInt != 0 { 8738 break 8739 } 8740 v.reset(OpMIPS64MOVVconst) 8741 v.AuxInt = 0 8742 return true 8743 } 8744 // match: (Select1 (MULVU (MOVVconst [0]) _)) 8745 // cond: 8746 // result: (MOVVconst [0]) 8747 for { 8748 v_0 := v.Args[0] 8749 if v_0.Op != OpMIPS64MULVU { 8750 break 8751 } 8752 _ = v_0.Args[1] 8753 v_0_0 := v_0.Args[0] 8754 if v_0_0.Op != OpMIPS64MOVVconst { 8755 break 8756 } 8757 if v_0_0.AuxInt != 0 { 8758 break 8759 } 8760 v.reset(OpMIPS64MOVVconst) 8761 v.AuxInt = 0 8762 return true 8763 } 8764 // match: (Select1 (MULVU x (MOVVconst [1]))) 8765 // cond: 8766 // result: x 8767 for { 8768 v_0 := v.Args[0] 8769 if v_0.Op != OpMIPS64MULVU { 8770 break 8771 } 8772 _ = v_0.Args[1] 8773 x := v_0.Args[0] 8774 v_0_1 := v_0.Args[1] 8775 if v_0_1.Op != OpMIPS64MOVVconst { 8776 break 8777 } 8778 if v_0_1.AuxInt != 1 { 8779 break 8780 } 8781 v.reset(OpCopy) 8782 v.Type = x.Type 8783 v.AddArg(x) 8784 return true 8785 } 8786 // match: (Select1 (MULVU (MOVVconst [1]) x)) 8787 // cond: 8788 // result: x 8789 for { 8790 v_0 := v.Args[0] 8791 if v_0.Op != OpMIPS64MULVU { 8792 break 8793 } 8794 _ = v_0.Args[1] 8795 v_0_0 := v_0.Args[0] 8796 if v_0_0.Op != OpMIPS64MOVVconst { 8797 break 8798 } 8799 if v_0_0.AuxInt != 1 { 8800 break 8801 } 8802 x := v_0.Args[1] 8803 v.reset(OpCopy) 8804 v.Type = x.Type 8805 v.AddArg(x) 8806 return true 8807 } 8808 // match: (Select1 (MULVU x (MOVVconst [c]))) 8809 // cond: isPowerOfTwo(c) 8810 // result: (SLLVconst [log2(c)] x) 8811 for { 8812 v_0 := v.Args[0] 8813 if v_0.Op != OpMIPS64MULVU { 8814 break 8815 } 8816 _ = v_0.Args[1] 
8817 x := v_0.Args[0] 8818 v_0_1 := v_0.Args[1] 8819 if v_0_1.Op != OpMIPS64MOVVconst { 8820 break 8821 } 8822 c := v_0_1.AuxInt 8823 if !(isPowerOfTwo(c)) { 8824 break 8825 } 8826 v.reset(OpMIPS64SLLVconst) 8827 v.AuxInt = log2(c) 8828 v.AddArg(x) 8829 return true 8830 } 8831 // match: (Select1 (MULVU (MOVVconst [c]) x)) 8832 // cond: isPowerOfTwo(c) 8833 // result: (SLLVconst [log2(c)] x) 8834 for { 8835 v_0 := v.Args[0] 8836 if v_0.Op != OpMIPS64MULVU { 8837 break 8838 } 8839 _ = v_0.Args[1] 8840 v_0_0 := v_0.Args[0] 8841 if v_0_0.Op != OpMIPS64MOVVconst { 8842 break 8843 } 8844 c := v_0_0.AuxInt 8845 x := v_0.Args[1] 8846 if !(isPowerOfTwo(c)) { 8847 break 8848 } 8849 v.reset(OpMIPS64SLLVconst) 8850 v.AuxInt = log2(c) 8851 v.AddArg(x) 8852 return true 8853 } 8854 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 8855 // cond: 8856 // result: (NEGV x) 8857 for { 8858 v_0 := v.Args[0] 8859 if v_0.Op != OpMIPS64MULVU { 8860 break 8861 } 8862 _ = v_0.Args[1] 8863 v_0_0 := v_0.Args[0] 8864 if v_0_0.Op != OpMIPS64MOVVconst { 8865 break 8866 } 8867 if v_0_0.AuxInt != -1 { 8868 break 8869 } 8870 x := v_0.Args[1] 8871 v.reset(OpMIPS64NEGV) 8872 v.AddArg(x) 8873 return true 8874 } 8875 // match: (Select1 (MULVU x (MOVVconst [-1]))) 8876 // cond: 8877 // result: (NEGV x) 8878 for { 8879 v_0 := v.Args[0] 8880 if v_0.Op != OpMIPS64MULVU { 8881 break 8882 } 8883 _ = v_0.Args[1] 8884 x := v_0.Args[0] 8885 v_0_1 := v_0.Args[1] 8886 if v_0_1.Op != OpMIPS64MOVVconst { 8887 break 8888 } 8889 if v_0_1.AuxInt != -1 { 8890 break 8891 } 8892 v.reset(OpMIPS64NEGV) 8893 v.AddArg(x) 8894 return true 8895 } 8896 return false 8897 } 8898 func rewriteValueMIPS64_OpSelect1_10(v *Value) bool { 8899 // match: (Select1 (MULVU (MOVVconst [0]) _)) 8900 // cond: 8901 // result: (MOVVconst [0]) 8902 for { 8903 v_0 := v.Args[0] 8904 if v_0.Op != OpMIPS64MULVU { 8905 break 8906 } 8907 _ = v_0.Args[1] 8908 v_0_0 := v_0.Args[0] 8909 if v_0_0.Op != OpMIPS64MOVVconst { 8910 break 8911 } 8912 if 
v_0_0.AuxInt != 0 { 8913 break 8914 } 8915 v.reset(OpMIPS64MOVVconst) 8916 v.AuxInt = 0 8917 return true 8918 } 8919 // match: (Select1 (MULVU _ (MOVVconst [0]))) 8920 // cond: 8921 // result: (MOVVconst [0]) 8922 for { 8923 v_0 := v.Args[0] 8924 if v_0.Op != OpMIPS64MULVU { 8925 break 8926 } 8927 _ = v_0.Args[1] 8928 v_0_1 := v_0.Args[1] 8929 if v_0_1.Op != OpMIPS64MOVVconst { 8930 break 8931 } 8932 if v_0_1.AuxInt != 0 { 8933 break 8934 } 8935 v.reset(OpMIPS64MOVVconst) 8936 v.AuxInt = 0 8937 return true 8938 } 8939 // match: (Select1 (MULVU (MOVVconst [1]) x)) 8940 // cond: 8941 // result: x 8942 for { 8943 v_0 := v.Args[0] 8944 if v_0.Op != OpMIPS64MULVU { 8945 break 8946 } 8947 _ = v_0.Args[1] 8948 v_0_0 := v_0.Args[0] 8949 if v_0_0.Op != OpMIPS64MOVVconst { 8950 break 8951 } 8952 if v_0_0.AuxInt != 1 { 8953 break 8954 } 8955 x := v_0.Args[1] 8956 v.reset(OpCopy) 8957 v.Type = x.Type 8958 v.AddArg(x) 8959 return true 8960 } 8961 // match: (Select1 (MULVU x (MOVVconst [1]))) 8962 // cond: 8963 // result: x 8964 for { 8965 v_0 := v.Args[0] 8966 if v_0.Op != OpMIPS64MULVU { 8967 break 8968 } 8969 _ = v_0.Args[1] 8970 x := v_0.Args[0] 8971 v_0_1 := v_0.Args[1] 8972 if v_0_1.Op != OpMIPS64MOVVconst { 8973 break 8974 } 8975 if v_0_1.AuxInt != 1 { 8976 break 8977 } 8978 v.reset(OpCopy) 8979 v.Type = x.Type 8980 v.AddArg(x) 8981 return true 8982 } 8983 // match: (Select1 (MULVU (MOVVconst [c]) x)) 8984 // cond: isPowerOfTwo(c) 8985 // result: (SLLVconst [log2(c)] x) 8986 for { 8987 v_0 := v.Args[0] 8988 if v_0.Op != OpMIPS64MULVU { 8989 break 8990 } 8991 _ = v_0.Args[1] 8992 v_0_0 := v_0.Args[0] 8993 if v_0_0.Op != OpMIPS64MOVVconst { 8994 break 8995 } 8996 c := v_0_0.AuxInt 8997 x := v_0.Args[1] 8998 if !(isPowerOfTwo(c)) { 8999 break 9000 } 9001 v.reset(OpMIPS64SLLVconst) 9002 v.AuxInt = log2(c) 9003 v.AddArg(x) 9004 return true 9005 } 9006 // match: (Select1 (MULVU x (MOVVconst [c]))) 9007 // cond: isPowerOfTwo(c) 9008 // result: (SLLVconst [log2(c)] x) 9009 for { 
9010 v_0 := v.Args[0] 9011 if v_0.Op != OpMIPS64MULVU { 9012 break 9013 } 9014 _ = v_0.Args[1] 9015 x := v_0.Args[0] 9016 v_0_1 := v_0.Args[1] 9017 if v_0_1.Op != OpMIPS64MOVVconst { 9018 break 9019 } 9020 c := v_0_1.AuxInt 9021 if !(isPowerOfTwo(c)) { 9022 break 9023 } 9024 v.reset(OpMIPS64SLLVconst) 9025 v.AuxInt = log2(c) 9026 v.AddArg(x) 9027 return true 9028 } 9029 // match: (Select1 (DIVVU x (MOVVconst [1]))) 9030 // cond: 9031 // result: x 9032 for { 9033 v_0 := v.Args[0] 9034 if v_0.Op != OpMIPS64DIVVU { 9035 break 9036 } 9037 _ = v_0.Args[1] 9038 x := v_0.Args[0] 9039 v_0_1 := v_0.Args[1] 9040 if v_0_1.Op != OpMIPS64MOVVconst { 9041 break 9042 } 9043 if v_0_1.AuxInt != 1 { 9044 break 9045 } 9046 v.reset(OpCopy) 9047 v.Type = x.Type 9048 v.AddArg(x) 9049 return true 9050 } 9051 // match: (Select1 (DIVVU x (MOVVconst [c]))) 9052 // cond: isPowerOfTwo(c) 9053 // result: (SRLVconst [log2(c)] x) 9054 for { 9055 v_0 := v.Args[0] 9056 if v_0.Op != OpMIPS64DIVVU { 9057 break 9058 } 9059 _ = v_0.Args[1] 9060 x := v_0.Args[0] 9061 v_0_1 := v_0.Args[1] 9062 if v_0_1.Op != OpMIPS64MOVVconst { 9063 break 9064 } 9065 c := v_0_1.AuxInt 9066 if !(isPowerOfTwo(c)) { 9067 break 9068 } 9069 v.reset(OpMIPS64SRLVconst) 9070 v.AuxInt = log2(c) 9071 v.AddArg(x) 9072 return true 9073 } 9074 // match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d]))) 9075 // cond: 9076 // result: (MOVVconst [c*d]) 9077 for { 9078 v_0 := v.Args[0] 9079 if v_0.Op != OpMIPS64MULVU { 9080 break 9081 } 9082 _ = v_0.Args[1] 9083 v_0_0 := v_0.Args[0] 9084 if v_0_0.Op != OpMIPS64MOVVconst { 9085 break 9086 } 9087 c := v_0_0.AuxInt 9088 v_0_1 := v_0.Args[1] 9089 if v_0_1.Op != OpMIPS64MOVVconst { 9090 break 9091 } 9092 d := v_0_1.AuxInt 9093 v.reset(OpMIPS64MOVVconst) 9094 v.AuxInt = c * d 9095 return true 9096 } 9097 // match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c]))) 9098 // cond: 9099 // result: (MOVVconst [c*d]) 9100 for { 9101 v_0 := v.Args[0] 9102 if v_0.Op != OpMIPS64MULVU { 9103 break 9104 
} 9105 _ = v_0.Args[1] 9106 v_0_0 := v_0.Args[0] 9107 if v_0_0.Op != OpMIPS64MOVVconst { 9108 break 9109 } 9110 d := v_0_0.AuxInt 9111 v_0_1 := v_0.Args[1] 9112 if v_0_1.Op != OpMIPS64MOVVconst { 9113 break 9114 } 9115 c := v_0_1.AuxInt 9116 v.reset(OpMIPS64MOVVconst) 9117 v.AuxInt = c * d 9118 return true 9119 } 9120 return false 9121 } 9122 func rewriteValueMIPS64_OpSelect1_20(v *Value) bool { 9123 // match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 9124 // cond: 9125 // result: (MOVVconst [int64(c)/int64(d)]) 9126 for { 9127 v_0 := v.Args[0] 9128 if v_0.Op != OpMIPS64DIVV { 9129 break 9130 } 9131 _ = v_0.Args[1] 9132 v_0_0 := v_0.Args[0] 9133 if v_0_0.Op != OpMIPS64MOVVconst { 9134 break 9135 } 9136 c := v_0_0.AuxInt 9137 v_0_1 := v_0.Args[1] 9138 if v_0_1.Op != OpMIPS64MOVVconst { 9139 break 9140 } 9141 d := v_0_1.AuxInt 9142 v.reset(OpMIPS64MOVVconst) 9143 v.AuxInt = int64(c) / int64(d) 9144 return true 9145 } 9146 // match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 9147 // cond: 9148 // result: (MOVVconst [int64(uint64(c)/uint64(d))]) 9149 for { 9150 v_0 := v.Args[0] 9151 if v_0.Op != OpMIPS64DIVVU { 9152 break 9153 } 9154 _ = v_0.Args[1] 9155 v_0_0 := v_0.Args[0] 9156 if v_0_0.Op != OpMIPS64MOVVconst { 9157 break 9158 } 9159 c := v_0_0.AuxInt 9160 v_0_1 := v_0.Args[1] 9161 if v_0_1.Op != OpMIPS64MOVVconst { 9162 break 9163 } 9164 d := v_0_1.AuxInt 9165 v.reset(OpMIPS64MOVVconst) 9166 v.AuxInt = int64(uint64(c) / uint64(d)) 9167 return true 9168 } 9169 return false 9170 } 9171 func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool { 9172 // match: (SignExt16to32 x) 9173 // cond: 9174 // result: (MOVHreg x) 9175 for { 9176 x := v.Args[0] 9177 v.reset(OpMIPS64MOVHreg) 9178 v.AddArg(x) 9179 return true 9180 } 9181 } 9182 func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool { 9183 // match: (SignExt16to64 x) 9184 // cond: 9185 // result: (MOVHreg x) 9186 for { 9187 x := v.Args[0] 9188 v.reset(OpMIPS64MOVHreg) 9189 v.AddArg(x) 9190 return true 
9191 } 9192 } 9193 func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool { 9194 // match: (SignExt32to64 x) 9195 // cond: 9196 // result: (MOVWreg x) 9197 for { 9198 x := v.Args[0] 9199 v.reset(OpMIPS64MOVWreg) 9200 v.AddArg(x) 9201 return true 9202 } 9203 } 9204 func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool { 9205 // match: (SignExt8to16 x) 9206 // cond: 9207 // result: (MOVBreg x) 9208 for { 9209 x := v.Args[0] 9210 v.reset(OpMIPS64MOVBreg) 9211 v.AddArg(x) 9212 return true 9213 } 9214 } 9215 func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool { 9216 // match: (SignExt8to32 x) 9217 // cond: 9218 // result: (MOVBreg x) 9219 for { 9220 x := v.Args[0] 9221 v.reset(OpMIPS64MOVBreg) 9222 v.AddArg(x) 9223 return true 9224 } 9225 } 9226 func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool { 9227 // match: (SignExt8to64 x) 9228 // cond: 9229 // result: (MOVBreg x) 9230 for { 9231 x := v.Args[0] 9232 v.reset(OpMIPS64MOVBreg) 9233 v.AddArg(x) 9234 return true 9235 } 9236 } 9237 func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool { 9238 b := v.Block 9239 _ = b 9240 // match: (Slicemask <t> x) 9241 // cond: 9242 // result: (SRAVconst (NEGV <t> x) [63]) 9243 for { 9244 t := v.Type 9245 x := v.Args[0] 9246 v.reset(OpMIPS64SRAVconst) 9247 v.AuxInt = 63 9248 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 9249 v0.AddArg(x) 9250 v.AddArg(v0) 9251 return true 9252 } 9253 } 9254 func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool { 9255 // match: (StaticCall [argwid] {target} mem) 9256 // cond: 9257 // result: (CALLstatic [argwid] {target} mem) 9258 for { 9259 argwid := v.AuxInt 9260 target := v.Aux 9261 mem := v.Args[0] 9262 v.reset(OpMIPS64CALLstatic) 9263 v.AuxInt = argwid 9264 v.Aux = target 9265 v.AddArg(mem) 9266 return true 9267 } 9268 } 9269 func rewriteValueMIPS64_OpStore_0(v *Value) bool { 9270 // match: (Store {t} ptr val mem) 9271 // cond: t.(*types.Type).Size() == 1 9272 // result: (MOVBstore ptr val mem) 9273 for { 9274 t := v.Aux 9275 _ = v.Args[2] 
9276 ptr := v.Args[0] 9277 val := v.Args[1] 9278 mem := v.Args[2] 9279 if !(t.(*types.Type).Size() == 1) { 9280 break 9281 } 9282 v.reset(OpMIPS64MOVBstore) 9283 v.AddArg(ptr) 9284 v.AddArg(val) 9285 v.AddArg(mem) 9286 return true 9287 } 9288 // match: (Store {t} ptr val mem) 9289 // cond: t.(*types.Type).Size() == 2 9290 // result: (MOVHstore ptr val mem) 9291 for { 9292 t := v.Aux 9293 _ = v.Args[2] 9294 ptr := v.Args[0] 9295 val := v.Args[1] 9296 mem := v.Args[2] 9297 if !(t.(*types.Type).Size() == 2) { 9298 break 9299 } 9300 v.reset(OpMIPS64MOVHstore) 9301 v.AddArg(ptr) 9302 v.AddArg(val) 9303 v.AddArg(mem) 9304 return true 9305 } 9306 // match: (Store {t} ptr val mem) 9307 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 9308 // result: (MOVWstore ptr val mem) 9309 for { 9310 t := v.Aux 9311 _ = v.Args[2] 9312 ptr := v.Args[0] 9313 val := v.Args[1] 9314 mem := v.Args[2] 9315 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 9316 break 9317 } 9318 v.reset(OpMIPS64MOVWstore) 9319 v.AddArg(ptr) 9320 v.AddArg(val) 9321 v.AddArg(mem) 9322 return true 9323 } 9324 // match: (Store {t} ptr val mem) 9325 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 9326 // result: (MOVVstore ptr val mem) 9327 for { 9328 t := v.Aux 9329 _ = v.Args[2] 9330 ptr := v.Args[0] 9331 val := v.Args[1] 9332 mem := v.Args[2] 9333 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 9334 break 9335 } 9336 v.reset(OpMIPS64MOVVstore) 9337 v.AddArg(ptr) 9338 v.AddArg(val) 9339 v.AddArg(mem) 9340 return true 9341 } 9342 // match: (Store {t} ptr val mem) 9343 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 9344 // result: (MOVFstore ptr val mem) 9345 for { 9346 t := v.Aux 9347 _ = v.Args[2] 9348 ptr := v.Args[0] 9349 val := v.Args[1] 9350 mem := v.Args[2] 9351 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 9352 break 9353 } 9354 v.reset(OpMIPS64MOVFstore) 9355 v.AddArg(ptr) 9356 v.AddArg(val) 9357 v.AddArg(mem) 9358 
return true 9359 } 9360 // match: (Store {t} ptr val mem) 9361 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 9362 // result: (MOVDstore ptr val mem) 9363 for { 9364 t := v.Aux 9365 _ = v.Args[2] 9366 ptr := v.Args[0] 9367 val := v.Args[1] 9368 mem := v.Args[2] 9369 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 9370 break 9371 } 9372 v.reset(OpMIPS64MOVDstore) 9373 v.AddArg(ptr) 9374 v.AddArg(val) 9375 v.AddArg(mem) 9376 return true 9377 } 9378 return false 9379 } 9380 func rewriteValueMIPS64_OpSub16_0(v *Value) bool { 9381 // match: (Sub16 x y) 9382 // cond: 9383 // result: (SUBV x y) 9384 for { 9385 _ = v.Args[1] 9386 x := v.Args[0] 9387 y := v.Args[1] 9388 v.reset(OpMIPS64SUBV) 9389 v.AddArg(x) 9390 v.AddArg(y) 9391 return true 9392 } 9393 } 9394 func rewriteValueMIPS64_OpSub32_0(v *Value) bool { 9395 // match: (Sub32 x y) 9396 // cond: 9397 // result: (SUBV x y) 9398 for { 9399 _ = v.Args[1] 9400 x := v.Args[0] 9401 y := v.Args[1] 9402 v.reset(OpMIPS64SUBV) 9403 v.AddArg(x) 9404 v.AddArg(y) 9405 return true 9406 } 9407 } 9408 func rewriteValueMIPS64_OpSub32F_0(v *Value) bool { 9409 // match: (Sub32F x y) 9410 // cond: 9411 // result: (SUBF x y) 9412 for { 9413 _ = v.Args[1] 9414 x := v.Args[0] 9415 y := v.Args[1] 9416 v.reset(OpMIPS64SUBF) 9417 v.AddArg(x) 9418 v.AddArg(y) 9419 return true 9420 } 9421 } 9422 func rewriteValueMIPS64_OpSub64_0(v *Value) bool { 9423 // match: (Sub64 x y) 9424 // cond: 9425 // result: (SUBV x y) 9426 for { 9427 _ = v.Args[1] 9428 x := v.Args[0] 9429 y := v.Args[1] 9430 v.reset(OpMIPS64SUBV) 9431 v.AddArg(x) 9432 v.AddArg(y) 9433 return true 9434 } 9435 } 9436 func rewriteValueMIPS64_OpSub64F_0(v *Value) bool { 9437 // match: (Sub64F x y) 9438 // cond: 9439 // result: (SUBD x y) 9440 for { 9441 _ = v.Args[1] 9442 x := v.Args[0] 9443 y := v.Args[1] 9444 v.reset(OpMIPS64SUBD) 9445 v.AddArg(x) 9446 v.AddArg(y) 9447 return true 9448 } 9449 } 9450 func rewriteValueMIPS64_OpSub8_0(v *Value) bool { 9451 // 
match: (Sub8 x y) 9452 // cond: 9453 // result: (SUBV x y) 9454 for { 9455 _ = v.Args[1] 9456 x := v.Args[0] 9457 y := v.Args[1] 9458 v.reset(OpMIPS64SUBV) 9459 v.AddArg(x) 9460 v.AddArg(y) 9461 return true 9462 } 9463 } 9464 func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool { 9465 // match: (SubPtr x y) 9466 // cond: 9467 // result: (SUBV x y) 9468 for { 9469 _ = v.Args[1] 9470 x := v.Args[0] 9471 y := v.Args[1] 9472 v.reset(OpMIPS64SUBV) 9473 v.AddArg(x) 9474 v.AddArg(y) 9475 return true 9476 } 9477 } 9478 func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool { 9479 // match: (Trunc16to8 x) 9480 // cond: 9481 // result: x 9482 for { 9483 x := v.Args[0] 9484 v.reset(OpCopy) 9485 v.Type = x.Type 9486 v.AddArg(x) 9487 return true 9488 } 9489 } 9490 func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool { 9491 // match: (Trunc32to16 x) 9492 // cond: 9493 // result: x 9494 for { 9495 x := v.Args[0] 9496 v.reset(OpCopy) 9497 v.Type = x.Type 9498 v.AddArg(x) 9499 return true 9500 } 9501 } 9502 func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool { 9503 // match: (Trunc32to8 x) 9504 // cond: 9505 // result: x 9506 for { 9507 x := v.Args[0] 9508 v.reset(OpCopy) 9509 v.Type = x.Type 9510 v.AddArg(x) 9511 return true 9512 } 9513 } 9514 func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool { 9515 // match: (Trunc64to16 x) 9516 // cond: 9517 // result: x 9518 for { 9519 x := v.Args[0] 9520 v.reset(OpCopy) 9521 v.Type = x.Type 9522 v.AddArg(x) 9523 return true 9524 } 9525 } 9526 func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool { 9527 // match: (Trunc64to32 x) 9528 // cond: 9529 // result: x 9530 for { 9531 x := v.Args[0] 9532 v.reset(OpCopy) 9533 v.Type = x.Type 9534 v.AddArg(x) 9535 return true 9536 } 9537 } 9538 func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool { 9539 // match: (Trunc64to8 x) 9540 // cond: 9541 // result: x 9542 for { 9543 x := v.Args[0] 9544 v.reset(OpCopy) 9545 v.Type = x.Type 9546 v.AddArg(x) 9547 return true 9548 } 9549 } 9550 func 
rewriteValueMIPS64_OpXor16_0(v *Value) bool { 9551 // match: (Xor16 x y) 9552 // cond: 9553 // result: (XOR x y) 9554 for { 9555 _ = v.Args[1] 9556 x := v.Args[0] 9557 y := v.Args[1] 9558 v.reset(OpMIPS64XOR) 9559 v.AddArg(x) 9560 v.AddArg(y) 9561 return true 9562 } 9563 } 9564 func rewriteValueMIPS64_OpXor32_0(v *Value) bool { 9565 // match: (Xor32 x y) 9566 // cond: 9567 // result: (XOR x y) 9568 for { 9569 _ = v.Args[1] 9570 x := v.Args[0] 9571 y := v.Args[1] 9572 v.reset(OpMIPS64XOR) 9573 v.AddArg(x) 9574 v.AddArg(y) 9575 return true 9576 } 9577 } 9578 func rewriteValueMIPS64_OpXor64_0(v *Value) bool { 9579 // match: (Xor64 x y) 9580 // cond: 9581 // result: (XOR x y) 9582 for { 9583 _ = v.Args[1] 9584 x := v.Args[0] 9585 y := v.Args[1] 9586 v.reset(OpMIPS64XOR) 9587 v.AddArg(x) 9588 v.AddArg(y) 9589 return true 9590 } 9591 } 9592 func rewriteValueMIPS64_OpXor8_0(v *Value) bool { 9593 // match: (Xor8 x y) 9594 // cond: 9595 // result: (XOR x y) 9596 for { 9597 _ = v.Args[1] 9598 x := v.Args[0] 9599 y := v.Args[1] 9600 v.reset(OpMIPS64XOR) 9601 v.AddArg(x) 9602 v.AddArg(y) 9603 return true 9604 } 9605 } 9606 func rewriteValueMIPS64_OpZero_0(v *Value) bool { 9607 b := v.Block 9608 _ = b 9609 typ := &b.Func.Config.Types 9610 _ = typ 9611 // match: (Zero [0] _ mem) 9612 // cond: 9613 // result: mem 9614 for { 9615 if v.AuxInt != 0 { 9616 break 9617 } 9618 _ = v.Args[1] 9619 mem := v.Args[1] 9620 v.reset(OpCopy) 9621 v.Type = mem.Type 9622 v.AddArg(mem) 9623 return true 9624 } 9625 // match: (Zero [1] ptr mem) 9626 // cond: 9627 // result: (MOVBstore ptr (MOVVconst [0]) mem) 9628 for { 9629 if v.AuxInt != 1 { 9630 break 9631 } 9632 _ = v.Args[1] 9633 ptr := v.Args[0] 9634 mem := v.Args[1] 9635 v.reset(OpMIPS64MOVBstore) 9636 v.AddArg(ptr) 9637 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9638 v0.AuxInt = 0 9639 v.AddArg(v0) 9640 v.AddArg(mem) 9641 return true 9642 } 9643 // match: (Zero [2] {t} ptr mem) 9644 // cond: t.(*types.Type).Alignment()%2 == 0 
9645 // result: (MOVHstore ptr (MOVVconst [0]) mem) 9646 for { 9647 if v.AuxInt != 2 { 9648 break 9649 } 9650 t := v.Aux 9651 _ = v.Args[1] 9652 ptr := v.Args[0] 9653 mem := v.Args[1] 9654 if !(t.(*types.Type).Alignment()%2 == 0) { 9655 break 9656 } 9657 v.reset(OpMIPS64MOVHstore) 9658 v.AddArg(ptr) 9659 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9660 v0.AuxInt = 0 9661 v.AddArg(v0) 9662 v.AddArg(mem) 9663 return true 9664 } 9665 // match: (Zero [2] ptr mem) 9666 // cond: 9667 // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)) 9668 for { 9669 if v.AuxInt != 2 { 9670 break 9671 } 9672 _ = v.Args[1] 9673 ptr := v.Args[0] 9674 mem := v.Args[1] 9675 v.reset(OpMIPS64MOVBstore) 9676 v.AuxInt = 1 9677 v.AddArg(ptr) 9678 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9679 v0.AuxInt = 0 9680 v.AddArg(v0) 9681 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9682 v1.AuxInt = 0 9683 v1.AddArg(ptr) 9684 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9685 v2.AuxInt = 0 9686 v1.AddArg(v2) 9687 v1.AddArg(mem) 9688 v.AddArg(v1) 9689 return true 9690 } 9691 // match: (Zero [4] {t} ptr mem) 9692 // cond: t.(*types.Type).Alignment()%4 == 0 9693 // result: (MOVWstore ptr (MOVVconst [0]) mem) 9694 for { 9695 if v.AuxInt != 4 { 9696 break 9697 } 9698 t := v.Aux 9699 _ = v.Args[1] 9700 ptr := v.Args[0] 9701 mem := v.Args[1] 9702 if !(t.(*types.Type).Alignment()%4 == 0) { 9703 break 9704 } 9705 v.reset(OpMIPS64MOVWstore) 9706 v.AddArg(ptr) 9707 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9708 v0.AuxInt = 0 9709 v.AddArg(v0) 9710 v.AddArg(mem) 9711 return true 9712 } 9713 // match: (Zero [4] {t} ptr mem) 9714 // cond: t.(*types.Type).Alignment()%2 == 0 9715 // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)) 9716 for { 9717 if v.AuxInt != 4 { 9718 break 9719 } 9720 t := v.Aux 9721 _ = v.Args[1] 9722 ptr := v.Args[0] 9723 mem := v.Args[1] 9724 if 
!(t.(*types.Type).Alignment()%2 == 0) { 9725 break 9726 } 9727 v.reset(OpMIPS64MOVHstore) 9728 v.AuxInt = 2 9729 v.AddArg(ptr) 9730 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9731 v0.AuxInt = 0 9732 v.AddArg(v0) 9733 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9734 v1.AuxInt = 0 9735 v1.AddArg(ptr) 9736 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9737 v2.AuxInt = 0 9738 v1.AddArg(v2) 9739 v1.AddArg(mem) 9740 v.AddArg(v1) 9741 return true 9742 } 9743 // match: (Zero [4] ptr mem) 9744 // cond: 9745 // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))) 9746 for { 9747 if v.AuxInt != 4 { 9748 break 9749 } 9750 _ = v.Args[1] 9751 ptr := v.Args[0] 9752 mem := v.Args[1] 9753 v.reset(OpMIPS64MOVBstore) 9754 v.AuxInt = 3 9755 v.AddArg(ptr) 9756 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9757 v0.AuxInt = 0 9758 v.AddArg(v0) 9759 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9760 v1.AuxInt = 2 9761 v1.AddArg(ptr) 9762 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9763 v2.AuxInt = 0 9764 v1.AddArg(v2) 9765 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9766 v3.AuxInt = 1 9767 v3.AddArg(ptr) 9768 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9769 v4.AuxInt = 0 9770 v3.AddArg(v4) 9771 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9772 v5.AuxInt = 0 9773 v5.AddArg(ptr) 9774 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9775 v6.AuxInt = 0 9776 v5.AddArg(v6) 9777 v5.AddArg(mem) 9778 v3.AddArg(v5) 9779 v1.AddArg(v3) 9780 v.AddArg(v1) 9781 return true 9782 } 9783 // match: (Zero [8] {t} ptr mem) 9784 // cond: t.(*types.Type).Alignment()%8 == 0 9785 // result: (MOVVstore ptr (MOVVconst [0]) mem) 9786 for { 9787 if v.AuxInt != 8 { 9788 break 9789 } 9790 t := v.Aux 9791 _ = v.Args[1] 9792 ptr := v.Args[0] 9793 mem := v.Args[1] 9794 if 
!(t.(*types.Type).Alignment()%8 == 0) { 9795 break 9796 } 9797 v.reset(OpMIPS64MOVVstore) 9798 v.AddArg(ptr) 9799 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9800 v0.AuxInt = 0 9801 v.AddArg(v0) 9802 v.AddArg(mem) 9803 return true 9804 } 9805 // match: (Zero [8] {t} ptr mem) 9806 // cond: t.(*types.Type).Alignment()%4 == 0 9807 // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)) 9808 for { 9809 if v.AuxInt != 8 { 9810 break 9811 } 9812 t := v.Aux 9813 _ = v.Args[1] 9814 ptr := v.Args[0] 9815 mem := v.Args[1] 9816 if !(t.(*types.Type).Alignment()%4 == 0) { 9817 break 9818 } 9819 v.reset(OpMIPS64MOVWstore) 9820 v.AuxInt = 4 9821 v.AddArg(ptr) 9822 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9823 v0.AuxInt = 0 9824 v.AddArg(v0) 9825 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 9826 v1.AuxInt = 0 9827 v1.AddArg(ptr) 9828 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9829 v2.AuxInt = 0 9830 v1.AddArg(v2) 9831 v1.AddArg(mem) 9832 v.AddArg(v1) 9833 return true 9834 } 9835 // match: (Zero [8] {t} ptr mem) 9836 // cond: t.(*types.Type).Alignment()%2 == 0 9837 // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))) 9838 for { 9839 if v.AuxInt != 8 { 9840 break 9841 } 9842 t := v.Aux 9843 _ = v.Args[1] 9844 ptr := v.Args[0] 9845 mem := v.Args[1] 9846 if !(t.(*types.Type).Alignment()%2 == 0) { 9847 break 9848 } 9849 v.reset(OpMIPS64MOVHstore) 9850 v.AuxInt = 6 9851 v.AddArg(ptr) 9852 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9853 v0.AuxInt = 0 9854 v.AddArg(v0) 9855 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9856 v1.AuxInt = 4 9857 v1.AddArg(ptr) 9858 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9859 v2.AuxInt = 0 9860 v1.AddArg(v2) 9861 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9862 v3.AuxInt = 2 9863 v3.AddArg(ptr) 9864 v4 := 
b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9865 v4.AuxInt = 0 9866 v3.AddArg(v4) 9867 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9868 v5.AuxInt = 0 9869 v5.AddArg(ptr) 9870 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9871 v6.AuxInt = 0 9872 v5.AddArg(v6) 9873 v5.AddArg(mem) 9874 v3.AddArg(v5) 9875 v1.AddArg(v3) 9876 v.AddArg(v1) 9877 return true 9878 } 9879 return false 9880 } 9881 func rewriteValueMIPS64_OpZero_10(v *Value) bool { 9882 b := v.Block 9883 _ = b 9884 config := b.Func.Config 9885 _ = config 9886 typ := &b.Func.Config.Types 9887 _ = typ 9888 // match: (Zero [3] ptr mem) 9889 // cond: 9890 // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))) 9891 for { 9892 if v.AuxInt != 3 { 9893 break 9894 } 9895 _ = v.Args[1] 9896 ptr := v.Args[0] 9897 mem := v.Args[1] 9898 v.reset(OpMIPS64MOVBstore) 9899 v.AuxInt = 2 9900 v.AddArg(ptr) 9901 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9902 v0.AuxInt = 0 9903 v.AddArg(v0) 9904 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9905 v1.AuxInt = 1 9906 v1.AddArg(ptr) 9907 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9908 v2.AuxInt = 0 9909 v1.AddArg(v2) 9910 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 9911 v3.AuxInt = 0 9912 v3.AddArg(ptr) 9913 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9914 v4.AuxInt = 0 9915 v3.AddArg(v4) 9916 v3.AddArg(mem) 9917 v1.AddArg(v3) 9918 v.AddArg(v1) 9919 return true 9920 } 9921 // match: (Zero [6] {t} ptr mem) 9922 // cond: t.(*types.Type).Alignment()%2 == 0 9923 // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))) 9924 for { 9925 if v.AuxInt != 6 { 9926 break 9927 } 9928 t := v.Aux 9929 _ = v.Args[1] 9930 ptr := v.Args[0] 9931 mem := v.Args[1] 9932 if !(t.(*types.Type).Alignment()%2 == 0) { 9933 break 9934 } 9935 v.reset(OpMIPS64MOVHstore) 9936 v.AuxInt = 4 
9937 v.AddArg(ptr) 9938 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9939 v0.AuxInt = 0 9940 v.AddArg(v0) 9941 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9942 v1.AuxInt = 2 9943 v1.AddArg(ptr) 9944 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9945 v2.AuxInt = 0 9946 v1.AddArg(v2) 9947 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 9948 v3.AuxInt = 0 9949 v3.AddArg(ptr) 9950 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9951 v4.AuxInt = 0 9952 v3.AddArg(v4) 9953 v3.AddArg(mem) 9954 v1.AddArg(v3) 9955 v.AddArg(v1) 9956 return true 9957 } 9958 // match: (Zero [12] {t} ptr mem) 9959 // cond: t.(*types.Type).Alignment()%4 == 0 9960 // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))) 9961 for { 9962 if v.AuxInt != 12 { 9963 break 9964 } 9965 t := v.Aux 9966 _ = v.Args[1] 9967 ptr := v.Args[0] 9968 mem := v.Args[1] 9969 if !(t.(*types.Type).Alignment()%4 == 0) { 9970 break 9971 } 9972 v.reset(OpMIPS64MOVWstore) 9973 v.AuxInt = 8 9974 v.AddArg(ptr) 9975 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9976 v0.AuxInt = 0 9977 v.AddArg(v0) 9978 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 9979 v1.AuxInt = 4 9980 v1.AddArg(ptr) 9981 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9982 v2.AuxInt = 0 9983 v1.AddArg(v2) 9984 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 9985 v3.AuxInt = 0 9986 v3.AddArg(ptr) 9987 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 9988 v4.AuxInt = 0 9989 v3.AddArg(v4) 9990 v3.AddArg(mem) 9991 v1.AddArg(v3) 9992 v.AddArg(v1) 9993 return true 9994 } 9995 // match: (Zero [16] {t} ptr mem) 9996 // cond: t.(*types.Type).Alignment()%8 == 0 9997 // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)) 9998 for { 9999 if v.AuxInt != 16 { 10000 break 10001 } 10002 t := v.Aux 10003 _ = v.Args[1] 10004 ptr := v.Args[0] 10005 mem := v.Args[1] 10006 if 
!(t.(*types.Type).Alignment()%8 == 0) { 10007 break 10008 } 10009 v.reset(OpMIPS64MOVVstore) 10010 v.AuxInt = 8 10011 v.AddArg(ptr) 10012 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10013 v0.AuxInt = 0 10014 v.AddArg(v0) 10015 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10016 v1.AuxInt = 0 10017 v1.AddArg(ptr) 10018 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10019 v2.AuxInt = 0 10020 v1.AddArg(v2) 10021 v1.AddArg(mem) 10022 v.AddArg(v1) 10023 return true 10024 } 10025 // match: (Zero [24] {t} ptr mem) 10026 // cond: t.(*types.Type).Alignment()%8 == 0 10027 // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))) 10028 for { 10029 if v.AuxInt != 24 { 10030 break 10031 } 10032 t := v.Aux 10033 _ = v.Args[1] 10034 ptr := v.Args[0] 10035 mem := v.Args[1] 10036 if !(t.(*types.Type).Alignment()%8 == 0) { 10037 break 10038 } 10039 v.reset(OpMIPS64MOVVstore) 10040 v.AuxInt = 16 10041 v.AddArg(ptr) 10042 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10043 v0.AuxInt = 0 10044 v.AddArg(v0) 10045 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10046 v1.AuxInt = 8 10047 v1.AddArg(ptr) 10048 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10049 v2.AuxInt = 0 10050 v1.AddArg(v2) 10051 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10052 v3.AuxInt = 0 10053 v3.AddArg(ptr) 10054 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10055 v4.AuxInt = 0 10056 v3.AddArg(v4) 10057 v3.AddArg(mem) 10058 v1.AddArg(v3) 10059 v.AddArg(v1) 10060 return true 10061 } 10062 // match: (Zero [s] {t} ptr mem) 10063 // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice 10064 // result: (DUFFZERO [8 * (128 - int64(s/8))] ptr mem) 10065 for { 10066 s := v.AuxInt 10067 t := v.Aux 10068 _ = v.Args[1] 10069 ptr := v.Args[0] 10070 mem := v.Args[1] 10071 if !(s%8 == 0 && s > 24 && s <= 8*128 && 
t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) { 10072 break 10073 } 10074 v.reset(OpMIPS64DUFFZERO) 10075 v.AuxInt = 8 * (128 - int64(s/8)) 10076 v.AddArg(ptr) 10077 v.AddArg(mem) 10078 return true 10079 } 10080 // match: (Zero [s] {t} ptr mem) 10081 // cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0 10082 // result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 10083 for { 10084 s := v.AuxInt 10085 t := v.Aux 10086 _ = v.Args[1] 10087 ptr := v.Args[0] 10088 mem := v.Args[1] 10089 if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) { 10090 break 10091 } 10092 v.reset(OpMIPS64LoweredZero) 10093 v.AuxInt = t.(*types.Type).Alignment() 10094 v.AddArg(ptr) 10095 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type) 10096 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 10097 v0.AddArg(ptr) 10098 v.AddArg(v0) 10099 v.AddArg(mem) 10100 return true 10101 } 10102 return false 10103 } 10104 func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool { 10105 // match: (ZeroExt16to32 x) 10106 // cond: 10107 // result: (MOVHUreg x) 10108 for { 10109 x := v.Args[0] 10110 v.reset(OpMIPS64MOVHUreg) 10111 v.AddArg(x) 10112 return true 10113 } 10114 } 10115 func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool { 10116 // match: (ZeroExt16to64 x) 10117 // cond: 10118 // result: (MOVHUreg x) 10119 for { 10120 x := v.Args[0] 10121 v.reset(OpMIPS64MOVHUreg) 10122 v.AddArg(x) 10123 return true 10124 } 10125 } 10126 func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool { 10127 // match: (ZeroExt32to64 x) 10128 // cond: 10129 // result: (MOVWUreg x) 10130 for { 10131 x := v.Args[0] 10132 v.reset(OpMIPS64MOVWUreg) 10133 v.AddArg(x) 10134 return true 10135 } 10136 } 10137 func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool { 10138 // match: (ZeroExt8to16 x) 10139 // cond: 10140 // result: (MOVBUreg x) 10141 for { 10142 
x := v.Args[0] 10143 v.reset(OpMIPS64MOVBUreg) 10144 v.AddArg(x) 10145 return true 10146 } 10147 } 10148 func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool { 10149 // match: (ZeroExt8to32 x) 10150 // cond: 10151 // result: (MOVBUreg x) 10152 for { 10153 x := v.Args[0] 10154 v.reset(OpMIPS64MOVBUreg) 10155 v.AddArg(x) 10156 return true 10157 } 10158 } 10159 func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool { 10160 // match: (ZeroExt8to64 x) 10161 // cond: 10162 // result: (MOVBUreg x) 10163 for { 10164 x := v.Args[0] 10165 v.reset(OpMIPS64MOVBUreg) 10166 v.AddArg(x) 10167 return true 10168 } 10169 } 10170 func rewriteBlockMIPS64(b *Block) bool { 10171 config := b.Func.Config 10172 _ = config 10173 fe := b.Func.fe 10174 _ = fe 10175 typ := &config.Types 10176 _ = typ 10177 switch b.Kind { 10178 case BlockMIPS64EQ: 10179 // match: (EQ (FPFlagTrue cmp) yes no) 10180 // cond: 10181 // result: (FPF cmp yes no) 10182 for { 10183 v := b.Control 10184 if v.Op != OpMIPS64FPFlagTrue { 10185 break 10186 } 10187 cmp := v.Args[0] 10188 b.Kind = BlockMIPS64FPF 10189 b.SetControl(cmp) 10190 return true 10191 } 10192 // match: (EQ (FPFlagFalse cmp) yes no) 10193 // cond: 10194 // result: (FPT cmp yes no) 10195 for { 10196 v := b.Control 10197 if v.Op != OpMIPS64FPFlagFalse { 10198 break 10199 } 10200 cmp := v.Args[0] 10201 b.Kind = BlockMIPS64FPT 10202 b.SetControl(cmp) 10203 return true 10204 } 10205 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no) 10206 // cond: 10207 // result: (NE cmp yes no) 10208 for { 10209 v := b.Control 10210 if v.Op != OpMIPS64XORconst { 10211 break 10212 } 10213 if v.AuxInt != 1 { 10214 break 10215 } 10216 cmp := v.Args[0] 10217 if cmp.Op != OpMIPS64SGT { 10218 break 10219 } 10220 _ = cmp.Args[1] 10221 b.Kind = BlockMIPS64NE 10222 b.SetControl(cmp) 10223 return true 10224 } 10225 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no) 10226 // cond: 10227 // result: (NE cmp yes no) 10228 for { 10229 v := b.Control 10230 if v.Op != 
OpMIPS64XORconst { 10231 break 10232 } 10233 if v.AuxInt != 1 { 10234 break 10235 } 10236 cmp := v.Args[0] 10237 if cmp.Op != OpMIPS64SGTU { 10238 break 10239 } 10240 _ = cmp.Args[1] 10241 b.Kind = BlockMIPS64NE 10242 b.SetControl(cmp) 10243 return true 10244 } 10245 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no) 10246 // cond: 10247 // result: (NE cmp yes no) 10248 for { 10249 v := b.Control 10250 if v.Op != OpMIPS64XORconst { 10251 break 10252 } 10253 if v.AuxInt != 1 { 10254 break 10255 } 10256 cmp := v.Args[0] 10257 if cmp.Op != OpMIPS64SGTconst { 10258 break 10259 } 10260 b.Kind = BlockMIPS64NE 10261 b.SetControl(cmp) 10262 return true 10263 } 10264 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no) 10265 // cond: 10266 // result: (NE cmp yes no) 10267 for { 10268 v := b.Control 10269 if v.Op != OpMIPS64XORconst { 10270 break 10271 } 10272 if v.AuxInt != 1 { 10273 break 10274 } 10275 cmp := v.Args[0] 10276 if cmp.Op != OpMIPS64SGTUconst { 10277 break 10278 } 10279 b.Kind = BlockMIPS64NE 10280 b.SetControl(cmp) 10281 return true 10282 } 10283 // match: (EQ (SGTUconst [1] x) yes no) 10284 // cond: 10285 // result: (NE x yes no) 10286 for { 10287 v := b.Control 10288 if v.Op != OpMIPS64SGTUconst { 10289 break 10290 } 10291 if v.AuxInt != 1 { 10292 break 10293 } 10294 x := v.Args[0] 10295 b.Kind = BlockMIPS64NE 10296 b.SetControl(x) 10297 return true 10298 } 10299 // match: (EQ (SGTU x (MOVVconst [0])) yes no) 10300 // cond: 10301 // result: (EQ x yes no) 10302 for { 10303 v := b.Control 10304 if v.Op != OpMIPS64SGTU { 10305 break 10306 } 10307 _ = v.Args[1] 10308 x := v.Args[0] 10309 v_1 := v.Args[1] 10310 if v_1.Op != OpMIPS64MOVVconst { 10311 break 10312 } 10313 if v_1.AuxInt != 0 { 10314 break 10315 } 10316 b.Kind = BlockMIPS64EQ 10317 b.SetControl(x) 10318 return true 10319 } 10320 // match: (EQ (SGTconst [0] x) yes no) 10321 // cond: 10322 // result: (GEZ x yes no) 10323 for { 10324 v := b.Control 10325 if v.Op != OpMIPS64SGTconst { 10326 break 
10327 } 10328 if v.AuxInt != 0 { 10329 break 10330 } 10331 x := v.Args[0] 10332 b.Kind = BlockMIPS64GEZ 10333 b.SetControl(x) 10334 return true 10335 } 10336 // match: (EQ (SGT x (MOVVconst [0])) yes no) 10337 // cond: 10338 // result: (LEZ x yes no) 10339 for { 10340 v := b.Control 10341 if v.Op != OpMIPS64SGT { 10342 break 10343 } 10344 _ = v.Args[1] 10345 x := v.Args[0] 10346 v_1 := v.Args[1] 10347 if v_1.Op != OpMIPS64MOVVconst { 10348 break 10349 } 10350 if v_1.AuxInt != 0 { 10351 break 10352 } 10353 b.Kind = BlockMIPS64LEZ 10354 b.SetControl(x) 10355 return true 10356 } 10357 // match: (EQ (MOVVconst [0]) yes no) 10358 // cond: 10359 // result: (First nil yes no) 10360 for { 10361 v := b.Control 10362 if v.Op != OpMIPS64MOVVconst { 10363 break 10364 } 10365 if v.AuxInt != 0 { 10366 break 10367 } 10368 b.Kind = BlockFirst 10369 b.SetControl(nil) 10370 return true 10371 } 10372 // match: (EQ (MOVVconst [c]) yes no) 10373 // cond: c != 0 10374 // result: (First nil no yes) 10375 for { 10376 v := b.Control 10377 if v.Op != OpMIPS64MOVVconst { 10378 break 10379 } 10380 c := v.AuxInt 10381 if !(c != 0) { 10382 break 10383 } 10384 b.Kind = BlockFirst 10385 b.SetControl(nil) 10386 b.swapSuccessors() 10387 return true 10388 } 10389 case BlockMIPS64GEZ: 10390 // match: (GEZ (MOVVconst [c]) yes no) 10391 // cond: c >= 0 10392 // result: (First nil yes no) 10393 for { 10394 v := b.Control 10395 if v.Op != OpMIPS64MOVVconst { 10396 break 10397 } 10398 c := v.AuxInt 10399 if !(c >= 0) { 10400 break 10401 } 10402 b.Kind = BlockFirst 10403 b.SetControl(nil) 10404 return true 10405 } 10406 // match: (GEZ (MOVVconst [c]) yes no) 10407 // cond: c < 0 10408 // result: (First nil no yes) 10409 for { 10410 v := b.Control 10411 if v.Op != OpMIPS64MOVVconst { 10412 break 10413 } 10414 c := v.AuxInt 10415 if !(c < 0) { 10416 break 10417 } 10418 b.Kind = BlockFirst 10419 b.SetControl(nil) 10420 b.swapSuccessors() 10421 return true 10422 } 10423 case BlockMIPS64GTZ: 10424 // match: 
(GTZ (MOVVconst [c]) yes no) 10425 // cond: c > 0 10426 // result: (First nil yes no) 10427 for { 10428 v := b.Control 10429 if v.Op != OpMIPS64MOVVconst { 10430 break 10431 } 10432 c := v.AuxInt 10433 if !(c > 0) { 10434 break 10435 } 10436 b.Kind = BlockFirst 10437 b.SetControl(nil) 10438 return true 10439 } 10440 // match: (GTZ (MOVVconst [c]) yes no) 10441 // cond: c <= 0 10442 // result: (First nil no yes) 10443 for { 10444 v := b.Control 10445 if v.Op != OpMIPS64MOVVconst { 10446 break 10447 } 10448 c := v.AuxInt 10449 if !(c <= 0) { 10450 break 10451 } 10452 b.Kind = BlockFirst 10453 b.SetControl(nil) 10454 b.swapSuccessors() 10455 return true 10456 } 10457 case BlockIf: 10458 // match: (If cond yes no) 10459 // cond: 10460 // result: (NE cond yes no) 10461 for { 10462 v := b.Control 10463 _ = v 10464 cond := b.Control 10465 b.Kind = BlockMIPS64NE 10466 b.SetControl(cond) 10467 return true 10468 } 10469 case BlockMIPS64LEZ: 10470 // match: (LEZ (MOVVconst [c]) yes no) 10471 // cond: c <= 0 10472 // result: (First nil yes no) 10473 for { 10474 v := b.Control 10475 if v.Op != OpMIPS64MOVVconst { 10476 break 10477 } 10478 c := v.AuxInt 10479 if !(c <= 0) { 10480 break 10481 } 10482 b.Kind = BlockFirst 10483 b.SetControl(nil) 10484 return true 10485 } 10486 // match: (LEZ (MOVVconst [c]) yes no) 10487 // cond: c > 0 10488 // result: (First nil no yes) 10489 for { 10490 v := b.Control 10491 if v.Op != OpMIPS64MOVVconst { 10492 break 10493 } 10494 c := v.AuxInt 10495 if !(c > 0) { 10496 break 10497 } 10498 b.Kind = BlockFirst 10499 b.SetControl(nil) 10500 b.swapSuccessors() 10501 return true 10502 } 10503 case BlockMIPS64LTZ: 10504 // match: (LTZ (MOVVconst [c]) yes no) 10505 // cond: c < 0 10506 // result: (First nil yes no) 10507 for { 10508 v := b.Control 10509 if v.Op != OpMIPS64MOVVconst { 10510 break 10511 } 10512 c := v.AuxInt 10513 if !(c < 0) { 10514 break 10515 } 10516 b.Kind = BlockFirst 10517 b.SetControl(nil) 10518 return true 10519 } 10520 // match: 
(LTZ (MOVVconst [c]) yes no) 10521 // cond: c >= 0 10522 // result: (First nil no yes) 10523 for { 10524 v := b.Control 10525 if v.Op != OpMIPS64MOVVconst { 10526 break 10527 } 10528 c := v.AuxInt 10529 if !(c >= 0) { 10530 break 10531 } 10532 b.Kind = BlockFirst 10533 b.SetControl(nil) 10534 b.swapSuccessors() 10535 return true 10536 } 10537 case BlockMIPS64NE: 10538 // match: (NE (FPFlagTrue cmp) yes no) 10539 // cond: 10540 // result: (FPT cmp yes no) 10541 for { 10542 v := b.Control 10543 if v.Op != OpMIPS64FPFlagTrue { 10544 break 10545 } 10546 cmp := v.Args[0] 10547 b.Kind = BlockMIPS64FPT 10548 b.SetControl(cmp) 10549 return true 10550 } 10551 // match: (NE (FPFlagFalse cmp) yes no) 10552 // cond: 10553 // result: (FPF cmp yes no) 10554 for { 10555 v := b.Control 10556 if v.Op != OpMIPS64FPFlagFalse { 10557 break 10558 } 10559 cmp := v.Args[0] 10560 b.Kind = BlockMIPS64FPF 10561 b.SetControl(cmp) 10562 return true 10563 } 10564 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no) 10565 // cond: 10566 // result: (EQ cmp yes no) 10567 for { 10568 v := b.Control 10569 if v.Op != OpMIPS64XORconst { 10570 break 10571 } 10572 if v.AuxInt != 1 { 10573 break 10574 } 10575 cmp := v.Args[0] 10576 if cmp.Op != OpMIPS64SGT { 10577 break 10578 } 10579 _ = cmp.Args[1] 10580 b.Kind = BlockMIPS64EQ 10581 b.SetControl(cmp) 10582 return true 10583 } 10584 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no) 10585 // cond: 10586 // result: (EQ cmp yes no) 10587 for { 10588 v := b.Control 10589 if v.Op != OpMIPS64XORconst { 10590 break 10591 } 10592 if v.AuxInt != 1 { 10593 break 10594 } 10595 cmp := v.Args[0] 10596 if cmp.Op != OpMIPS64SGTU { 10597 break 10598 } 10599 _ = cmp.Args[1] 10600 b.Kind = BlockMIPS64EQ 10601 b.SetControl(cmp) 10602 return true 10603 } 10604 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no) 10605 // cond: 10606 // result: (EQ cmp yes no) 10607 for { 10608 v := b.Control 10609 if v.Op != OpMIPS64XORconst { 10610 break 10611 } 10612 if v.AuxInt != 1 { 
10613 break 10614 } 10615 cmp := v.Args[0] 10616 if cmp.Op != OpMIPS64SGTconst { 10617 break 10618 } 10619 b.Kind = BlockMIPS64EQ 10620 b.SetControl(cmp) 10621 return true 10622 } 10623 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no) 10624 // cond: 10625 // result: (EQ cmp yes no) 10626 for { 10627 v := b.Control 10628 if v.Op != OpMIPS64XORconst { 10629 break 10630 } 10631 if v.AuxInt != 1 { 10632 break 10633 } 10634 cmp := v.Args[0] 10635 if cmp.Op != OpMIPS64SGTUconst { 10636 break 10637 } 10638 b.Kind = BlockMIPS64EQ 10639 b.SetControl(cmp) 10640 return true 10641 } 10642 // match: (NE (SGTUconst [1] x) yes no) 10643 // cond: 10644 // result: (EQ x yes no) 10645 for { 10646 v := b.Control 10647 if v.Op != OpMIPS64SGTUconst { 10648 break 10649 } 10650 if v.AuxInt != 1 { 10651 break 10652 } 10653 x := v.Args[0] 10654 b.Kind = BlockMIPS64EQ 10655 b.SetControl(x) 10656 return true 10657 } 10658 // match: (NE (SGTU x (MOVVconst [0])) yes no) 10659 // cond: 10660 // result: (NE x yes no) 10661 for { 10662 v := b.Control 10663 if v.Op != OpMIPS64SGTU { 10664 break 10665 } 10666 _ = v.Args[1] 10667 x := v.Args[0] 10668 v_1 := v.Args[1] 10669 if v_1.Op != OpMIPS64MOVVconst { 10670 break 10671 } 10672 if v_1.AuxInt != 0 { 10673 break 10674 } 10675 b.Kind = BlockMIPS64NE 10676 b.SetControl(x) 10677 return true 10678 } 10679 // match: (NE (SGTconst [0] x) yes no) 10680 // cond: 10681 // result: (LTZ x yes no) 10682 for { 10683 v := b.Control 10684 if v.Op != OpMIPS64SGTconst { 10685 break 10686 } 10687 if v.AuxInt != 0 { 10688 break 10689 } 10690 x := v.Args[0] 10691 b.Kind = BlockMIPS64LTZ 10692 b.SetControl(x) 10693 return true 10694 } 10695 // match: (NE (SGT x (MOVVconst [0])) yes no) 10696 // cond: 10697 // result: (GTZ x yes no) 10698 for { 10699 v := b.Control 10700 if v.Op != OpMIPS64SGT { 10701 break 10702 } 10703 _ = v.Args[1] 10704 x := v.Args[0] 10705 v_1 := v.Args[1] 10706 if v_1.Op != OpMIPS64MOVVconst { 10707 break 10708 } 10709 if v_1.AuxInt != 0 { 
10710 break 10711 } 10712 b.Kind = BlockMIPS64GTZ 10713 b.SetControl(x) 10714 return true 10715 } 10716 // match: (NE (MOVVconst [0]) yes no) 10717 // cond: 10718 // result: (First nil no yes) 10719 for { 10720 v := b.Control 10721 if v.Op != OpMIPS64MOVVconst { 10722 break 10723 } 10724 if v.AuxInt != 0 { 10725 break 10726 } 10727 b.Kind = BlockFirst 10728 b.SetControl(nil) 10729 b.swapSuccessors() 10730 return true 10731 } 10732 // match: (NE (MOVVconst [c]) yes no) 10733 // cond: c != 0 10734 // result: (First nil yes no) 10735 for { 10736 v := b.Control 10737 if v.Op != OpMIPS64MOVVconst { 10738 break 10739 } 10740 c := v.AuxInt 10741 if !(c != 0) { 10742 break 10743 } 10744 b.Kind = BlockFirst 10745 b.SetControl(nil) 10746 return true 10747 } 10748 } 10749 return false 10750 }