github.com/megatontech/mynoteforgo@v0.0.0-20200507084910-5d0c6ea6e890/源码/cmd/compile/internal/ssa/rewriteMIPS64.go (about) 1 // Code generated from gen/MIPS64.rules; DO NOT EDIT. 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "fmt" 7 import "math" 8 import "cmd/internal/obj" 9 import "cmd/internal/objabi" 10 import "cmd/compile/internal/types" 11 12 var _ = fmt.Println // in case not otherwise used 13 var _ = math.MinInt8 // in case not otherwise used 14 var _ = obj.ANOP // in case not otherwise used 15 var _ = objabi.GOROOT // in case not otherwise used 16 var _ = types.TypeMem // in case not otherwise used 17 18 func rewriteValueMIPS64(v *Value) bool { 19 switch v.Op { 20 case OpAdd16: 21 return rewriteValueMIPS64_OpAdd16_0(v) 22 case OpAdd32: 23 return rewriteValueMIPS64_OpAdd32_0(v) 24 case OpAdd32F: 25 return rewriteValueMIPS64_OpAdd32F_0(v) 26 case OpAdd64: 27 return rewriteValueMIPS64_OpAdd64_0(v) 28 case OpAdd64F: 29 return rewriteValueMIPS64_OpAdd64F_0(v) 30 case OpAdd8: 31 return rewriteValueMIPS64_OpAdd8_0(v) 32 case OpAddPtr: 33 return rewriteValueMIPS64_OpAddPtr_0(v) 34 case OpAddr: 35 return rewriteValueMIPS64_OpAddr_0(v) 36 case OpAnd16: 37 return rewriteValueMIPS64_OpAnd16_0(v) 38 case OpAnd32: 39 return rewriteValueMIPS64_OpAnd32_0(v) 40 case OpAnd64: 41 return rewriteValueMIPS64_OpAnd64_0(v) 42 case OpAnd8: 43 return rewriteValueMIPS64_OpAnd8_0(v) 44 case OpAndB: 45 return rewriteValueMIPS64_OpAndB_0(v) 46 case OpAtomicAdd32: 47 return rewriteValueMIPS64_OpAtomicAdd32_0(v) 48 case OpAtomicAdd64: 49 return rewriteValueMIPS64_OpAtomicAdd64_0(v) 50 case OpAtomicCompareAndSwap32: 51 return rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v) 52 case OpAtomicCompareAndSwap64: 53 return rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v) 54 case OpAtomicExchange32: 55 return rewriteValueMIPS64_OpAtomicExchange32_0(v) 56 case OpAtomicExchange64: 57 return rewriteValueMIPS64_OpAtomicExchange64_0(v) 58 case OpAtomicLoad32: 59 return rewriteValueMIPS64_OpAtomicLoad32_0(v) 60 case OpAtomicLoad64: 61 return rewriteValueMIPS64_OpAtomicLoad64_0(v) 62 case OpAtomicLoadPtr: 63 return rewriteValueMIPS64_OpAtomicLoadPtr_0(v) 64 case OpAtomicStore32: 65 return rewriteValueMIPS64_OpAtomicStore32_0(v) 66 case OpAtomicStore64: 67 return rewriteValueMIPS64_OpAtomicStore64_0(v) 68 case OpAtomicStorePtrNoWB: 69 return rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v) 70 case OpAvg64u: 71 return rewriteValueMIPS64_OpAvg64u_0(v) 72 case OpClosureCall: 73 return rewriteValueMIPS64_OpClosureCall_0(v) 74 case OpCom16: 75 return rewriteValueMIPS64_OpCom16_0(v) 76 case OpCom32: 77 return rewriteValueMIPS64_OpCom32_0(v) 78 case OpCom64: 79 return rewriteValueMIPS64_OpCom64_0(v) 80 case OpCom8: 81 return rewriteValueMIPS64_OpCom8_0(v) 82 case OpConst16: 83 return rewriteValueMIPS64_OpConst16_0(v) 84 case OpConst32: 85 return rewriteValueMIPS64_OpConst32_0(v) 86 case OpConst32F: 87 return rewriteValueMIPS64_OpConst32F_0(v) 88 case OpConst64: 89 return rewriteValueMIPS64_OpConst64_0(v) 90 case OpConst64F: 91 return rewriteValueMIPS64_OpConst64F_0(v) 92 case OpConst8: 93 return rewriteValueMIPS64_OpConst8_0(v) 94 case OpConstBool: 95 return rewriteValueMIPS64_OpConstBool_0(v) 96 case OpConstNil: 97 return rewriteValueMIPS64_OpConstNil_0(v) 98 case OpCvt32Fto32: 99 return rewriteValueMIPS64_OpCvt32Fto32_0(v) 100 case OpCvt32Fto64: 101 return rewriteValueMIPS64_OpCvt32Fto64_0(v) 102 case OpCvt32Fto64F: 103 return rewriteValueMIPS64_OpCvt32Fto64F_0(v) 104 case OpCvt32to32F: 105 return rewriteValueMIPS64_OpCvt32to32F_0(v) 106 case OpCvt32to64F: 107 return rewriteValueMIPS64_OpCvt32to64F_0(v) 108 case OpCvt64Fto32: 109 return rewriteValueMIPS64_OpCvt64Fto32_0(v) 110 case OpCvt64Fto32F: 111 return rewriteValueMIPS64_OpCvt64Fto32F_0(v) 112 case OpCvt64Fto64: 113 return rewriteValueMIPS64_OpCvt64Fto64_0(v) 114 case OpCvt64to32F: 115 return rewriteValueMIPS64_OpCvt64to32F_0(v) 116 case OpCvt64to64F: 117 return rewriteValueMIPS64_OpCvt64to64F_0(v) 118 case OpDiv16: 119 return rewriteValueMIPS64_OpDiv16_0(v) 120 case OpDiv16u: 121 return rewriteValueMIPS64_OpDiv16u_0(v) 122 case OpDiv32: 123 return rewriteValueMIPS64_OpDiv32_0(v) 124 case OpDiv32F: 125 return rewriteValueMIPS64_OpDiv32F_0(v) 126 case OpDiv32u: 127 return rewriteValueMIPS64_OpDiv32u_0(v) 128 case OpDiv64: 129 return rewriteValueMIPS64_OpDiv64_0(v) 130 case OpDiv64F: 131 return rewriteValueMIPS64_OpDiv64F_0(v) 132 case OpDiv64u: 133 return rewriteValueMIPS64_OpDiv64u_0(v) 134 case OpDiv8: 135 return rewriteValueMIPS64_OpDiv8_0(v) 136 case OpDiv8u: 137 return rewriteValueMIPS64_OpDiv8u_0(v) 138 case OpEq16: 139 return rewriteValueMIPS64_OpEq16_0(v) 140 case OpEq32: 141 return rewriteValueMIPS64_OpEq32_0(v) 142 case OpEq32F: 143 return rewriteValueMIPS64_OpEq32F_0(v) 144 case OpEq64: 145 return rewriteValueMIPS64_OpEq64_0(v) 146 case OpEq64F: 147 return rewriteValueMIPS64_OpEq64F_0(v) 148 case OpEq8: 149 return rewriteValueMIPS64_OpEq8_0(v) 150 case OpEqB: 151 return rewriteValueMIPS64_OpEqB_0(v) 152 case OpEqPtr: 153 return rewriteValueMIPS64_OpEqPtr_0(v) 154 case OpGeq16: 155 return rewriteValueMIPS64_OpGeq16_0(v) 156 case OpGeq16U: 157 return rewriteValueMIPS64_OpGeq16U_0(v) 158 case OpGeq32: 159 return rewriteValueMIPS64_OpGeq32_0(v) 160 case OpGeq32F: 161 return rewriteValueMIPS64_OpGeq32F_0(v) 162 case OpGeq32U: 163 return rewriteValueMIPS64_OpGeq32U_0(v) 164 case OpGeq64: 165 return rewriteValueMIPS64_OpGeq64_0(v) 166 case OpGeq64F: 167 return rewriteValueMIPS64_OpGeq64F_0(v) 168 case OpGeq64U: 169 return rewriteValueMIPS64_OpGeq64U_0(v) 170 case OpGeq8: 171 return rewriteValueMIPS64_OpGeq8_0(v) 172 case OpGeq8U: 173 return rewriteValueMIPS64_OpGeq8U_0(v) 174 case OpGetCallerPC: 175 return rewriteValueMIPS64_OpGetCallerPC_0(v) 176 case OpGetCallerSP: 177 return rewriteValueMIPS64_OpGetCallerSP_0(v) 178 case OpGetClosurePtr: 179 return rewriteValueMIPS64_OpGetClosurePtr_0(v) 180 case OpGreater16: 181 return rewriteValueMIPS64_OpGreater16_0(v) 182 case OpGreater16U: 183 return rewriteValueMIPS64_OpGreater16U_0(v) 184 case OpGreater32: 185 return rewriteValueMIPS64_OpGreater32_0(v) 186 case OpGreater32F: 187 return rewriteValueMIPS64_OpGreater32F_0(v) 188 case OpGreater32U: 189 return rewriteValueMIPS64_OpGreater32U_0(v) 190 case OpGreater64: 191 return rewriteValueMIPS64_OpGreater64_0(v) 192 case OpGreater64F: 193 return rewriteValueMIPS64_OpGreater64F_0(v) 194 case OpGreater64U: 195 return rewriteValueMIPS64_OpGreater64U_0(v) 196 case OpGreater8: 197 return rewriteValueMIPS64_OpGreater8_0(v) 198 case OpGreater8U: 199 return rewriteValueMIPS64_OpGreater8U_0(v) 200 case OpHmul32: 201 return rewriteValueMIPS64_OpHmul32_0(v) 202 case OpHmul32u: 203 return rewriteValueMIPS64_OpHmul32u_0(v) 204 case OpHmul64: 205 return rewriteValueMIPS64_OpHmul64_0(v) 206 case OpHmul64u: 207 return rewriteValueMIPS64_OpHmul64u_0(v) 208 case OpInterCall: 209 return rewriteValueMIPS64_OpInterCall_0(v) 210 case OpIsInBounds: 211 return rewriteValueMIPS64_OpIsInBounds_0(v) 212 case OpIsNonNil: 213 return rewriteValueMIPS64_OpIsNonNil_0(v) 214 case OpIsSliceInBounds: 215 return rewriteValueMIPS64_OpIsSliceInBounds_0(v) 216 case OpLeq16: 217 return rewriteValueMIPS64_OpLeq16_0(v) 218 case OpLeq16U: 219 return rewriteValueMIPS64_OpLeq16U_0(v) 220 case OpLeq32: 221 return rewriteValueMIPS64_OpLeq32_0(v) 222 case OpLeq32F: 223 return rewriteValueMIPS64_OpLeq32F_0(v) 224 case OpLeq32U: 225 return rewriteValueMIPS64_OpLeq32U_0(v) 226 case OpLeq64: 227 return rewriteValueMIPS64_OpLeq64_0(v) 228 case OpLeq64F: 229 return rewriteValueMIPS64_OpLeq64F_0(v) 230 case OpLeq64U: 231 return rewriteValueMIPS64_OpLeq64U_0(v) 232 case OpLeq8: 233 return rewriteValueMIPS64_OpLeq8_0(v) 234 case OpLeq8U: 235 return rewriteValueMIPS64_OpLeq8U_0(v) 236 case OpLess16: 237 return rewriteValueMIPS64_OpLess16_0(v) 238 case OpLess16U: 239 return rewriteValueMIPS64_OpLess16U_0(v) 240 case OpLess32: 241 return rewriteValueMIPS64_OpLess32_0(v) 242 case OpLess32F: 243 return rewriteValueMIPS64_OpLess32F_0(v) 244 case OpLess32U: 245 return rewriteValueMIPS64_OpLess32U_0(v) 246 case OpLess64: 247 return rewriteValueMIPS64_OpLess64_0(v) 248 case OpLess64F: 249 return rewriteValueMIPS64_OpLess64F_0(v) 250 case OpLess64U: 251 return rewriteValueMIPS64_OpLess64U_0(v) 252 case OpLess8: 253 return rewriteValueMIPS64_OpLess8_0(v) 254 case OpLess8U: 255 return rewriteValueMIPS64_OpLess8U_0(v) 256 case OpLoad: 257 return rewriteValueMIPS64_OpLoad_0(v) 258 case OpLocalAddr: 259 return rewriteValueMIPS64_OpLocalAddr_0(v) 260 case OpLsh16x16: 261 return rewriteValueMIPS64_OpLsh16x16_0(v) 262 case OpLsh16x32: 263 return rewriteValueMIPS64_OpLsh16x32_0(v) 264 case OpLsh16x64: 265 return rewriteValueMIPS64_OpLsh16x64_0(v) 266 case OpLsh16x8: 267 return rewriteValueMIPS64_OpLsh16x8_0(v) 268 case OpLsh32x16: 269 return rewriteValueMIPS64_OpLsh32x16_0(v) 270 case OpLsh32x32: 271 return rewriteValueMIPS64_OpLsh32x32_0(v) 272 case OpLsh32x64: 273 return rewriteValueMIPS64_OpLsh32x64_0(v) 274 case OpLsh32x8: 275 return rewriteValueMIPS64_OpLsh32x8_0(v) 276 case OpLsh64x16: 277 return rewriteValueMIPS64_OpLsh64x16_0(v) 278 case OpLsh64x32: 279 return rewriteValueMIPS64_OpLsh64x32_0(v) 280 case OpLsh64x64: 281 return rewriteValueMIPS64_OpLsh64x64_0(v) 282 case OpLsh64x8: 283 return rewriteValueMIPS64_OpLsh64x8_0(v) 284 case OpLsh8x16: 285 return rewriteValueMIPS64_OpLsh8x16_0(v) 286 case OpLsh8x32: 287 return rewriteValueMIPS64_OpLsh8x32_0(v) 288 case OpLsh8x64: 289 return rewriteValueMIPS64_OpLsh8x64_0(v) 290 case OpLsh8x8: 291 return rewriteValueMIPS64_OpLsh8x8_0(v) 292 case OpMIPS64ADDV: 293 return rewriteValueMIPS64_OpMIPS64ADDV_0(v) 294 case OpMIPS64ADDVconst: 295 return rewriteValueMIPS64_OpMIPS64ADDVconst_0(v) 296 case OpMIPS64AND: 297 return rewriteValueMIPS64_OpMIPS64AND_0(v) 298 case OpMIPS64ANDconst: 299 return rewriteValueMIPS64_OpMIPS64ANDconst_0(v) 300 case OpMIPS64LoweredAtomicAdd32: 301 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v) 302 case OpMIPS64LoweredAtomicAdd64: 303 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v) 304 case OpMIPS64LoweredAtomicStore32: 305 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v) 306 case OpMIPS64LoweredAtomicStore64: 307 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v) 308 case OpMIPS64MOVBUload: 309 return rewriteValueMIPS64_OpMIPS64MOVBUload_0(v) 310 case OpMIPS64MOVBUreg: 311 return rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v) 312 case OpMIPS64MOVBload: 313 return rewriteValueMIPS64_OpMIPS64MOVBload_0(v) 314 case OpMIPS64MOVBreg: 315 return rewriteValueMIPS64_OpMIPS64MOVBreg_0(v) 316 case OpMIPS64MOVBstore: 317 return rewriteValueMIPS64_OpMIPS64MOVBstore_0(v) 318 case OpMIPS64MOVBstorezero: 319 return rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v) 320 case OpMIPS64MOVDload: 321 return rewriteValueMIPS64_OpMIPS64MOVDload_0(v) 322 case OpMIPS64MOVDstore: 323 return rewriteValueMIPS64_OpMIPS64MOVDstore_0(v) 324 case OpMIPS64MOVFload: 325 return rewriteValueMIPS64_OpMIPS64MOVFload_0(v) 326 case OpMIPS64MOVFstore: 327 return rewriteValueMIPS64_OpMIPS64MOVFstore_0(v) 328 case OpMIPS64MOVHUload: 329 return rewriteValueMIPS64_OpMIPS64MOVHUload_0(v) 330 case OpMIPS64MOVHUreg: 331 return rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v) 332 case OpMIPS64MOVHload: 333 return rewriteValueMIPS64_OpMIPS64MOVHload_0(v) 334 case OpMIPS64MOVHreg: 335 return rewriteValueMIPS64_OpMIPS64MOVHreg_0(v) 336 case OpMIPS64MOVHstore: 337 return rewriteValueMIPS64_OpMIPS64MOVHstore_0(v) 338 case OpMIPS64MOVHstorezero: 339 return rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v) 340 case OpMIPS64MOVVload: 341 return rewriteValueMIPS64_OpMIPS64MOVVload_0(v) 342 case OpMIPS64MOVVreg: 343 return rewriteValueMIPS64_OpMIPS64MOVVreg_0(v) 344 case OpMIPS64MOVVstore: 345 return rewriteValueMIPS64_OpMIPS64MOVVstore_0(v) 346 case OpMIPS64MOVVstorezero: 347 return rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v) 348 case OpMIPS64MOVWUload: 349 return rewriteValueMIPS64_OpMIPS64MOVWUload_0(v) 350 case OpMIPS64MOVWUreg: 351 return rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v) 352 case OpMIPS64MOVWload: 353 return rewriteValueMIPS64_OpMIPS64MOVWload_0(v) 354 case OpMIPS64MOVWreg: 355 return rewriteValueMIPS64_OpMIPS64MOVWreg_0(v) || rewriteValueMIPS64_OpMIPS64MOVWreg_10(v) 356 case OpMIPS64MOVWstore: 357 return rewriteValueMIPS64_OpMIPS64MOVWstore_0(v) 358 case OpMIPS64MOVWstorezero: 359 return rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v) 360 case OpMIPS64NEGV: 361 return rewriteValueMIPS64_OpMIPS64NEGV_0(v) 362 case OpMIPS64NOR: 363 return rewriteValueMIPS64_OpMIPS64NOR_0(v) 364 case OpMIPS64NORconst: 365 return rewriteValueMIPS64_OpMIPS64NORconst_0(v) 366 case OpMIPS64OR: 367 return rewriteValueMIPS64_OpMIPS64OR_0(v) 368 case OpMIPS64ORconst: 369 return rewriteValueMIPS64_OpMIPS64ORconst_0(v) 370 case OpMIPS64SGT: 371 return rewriteValueMIPS64_OpMIPS64SGT_0(v) 372 case OpMIPS64SGTU: 373 return rewriteValueMIPS64_OpMIPS64SGTU_0(v) 374 case OpMIPS64SGTUconst: 375 return rewriteValueMIPS64_OpMIPS64SGTUconst_0(v) 376 case OpMIPS64SGTconst: 377 return rewriteValueMIPS64_OpMIPS64SGTconst_0(v) || rewriteValueMIPS64_OpMIPS64SGTconst_10(v) 378 case OpMIPS64SLLV: 379 return rewriteValueMIPS64_OpMIPS64SLLV_0(v) 380 case OpMIPS64SLLVconst: 381 return rewriteValueMIPS64_OpMIPS64SLLVconst_0(v) 382 case OpMIPS64SRAV: 383 return rewriteValueMIPS64_OpMIPS64SRAV_0(v) 384 case OpMIPS64SRAVconst: 385 return rewriteValueMIPS64_OpMIPS64SRAVconst_0(v) 386 case OpMIPS64SRLV: 387 return rewriteValueMIPS64_OpMIPS64SRLV_0(v) 388 case OpMIPS64SRLVconst: 389 return rewriteValueMIPS64_OpMIPS64SRLVconst_0(v) 390 case OpMIPS64SUBV: 391 return rewriteValueMIPS64_OpMIPS64SUBV_0(v) 392 case OpMIPS64SUBVconst: 393 return rewriteValueMIPS64_OpMIPS64SUBVconst_0(v) 394 case OpMIPS64XOR: 395 return rewriteValueMIPS64_OpMIPS64XOR_0(v) 396 case OpMIPS64XORconst: 397 return rewriteValueMIPS64_OpMIPS64XORconst_0(v) 398 case OpMod16: 399 return rewriteValueMIPS64_OpMod16_0(v) 400 case OpMod16u: 401 return rewriteValueMIPS64_OpMod16u_0(v) 402 case OpMod32: 403 return rewriteValueMIPS64_OpMod32_0(v) 404 case OpMod32u: 405 return rewriteValueMIPS64_OpMod32u_0(v) 406 case OpMod64: 407 return rewriteValueMIPS64_OpMod64_0(v) 408 case OpMod64u: 409 return rewriteValueMIPS64_OpMod64u_0(v) 410 case OpMod8: 411 return rewriteValueMIPS64_OpMod8_0(v) 412 case OpMod8u: 413 return rewriteValueMIPS64_OpMod8u_0(v) 414 case OpMove: 415 return rewriteValueMIPS64_OpMove_0(v) || rewriteValueMIPS64_OpMove_10(v) 416 case OpMul16: 417 return rewriteValueMIPS64_OpMul16_0(v) 418 case OpMul32: 419 return rewriteValueMIPS64_OpMul32_0(v) 420 case OpMul32F: 421 return rewriteValueMIPS64_OpMul32F_0(v) 422 case OpMul64: 423 return rewriteValueMIPS64_OpMul64_0(v) 424 case OpMul64F: 425 return rewriteValueMIPS64_OpMul64F_0(v) 426 case OpMul8: 427 return rewriteValueMIPS64_OpMul8_0(v) 428 case OpNeg16: 429 return rewriteValueMIPS64_OpNeg16_0(v) 430 case OpNeg32: 431 return rewriteValueMIPS64_OpNeg32_0(v) 432 case OpNeg32F: 433 return rewriteValueMIPS64_OpNeg32F_0(v) 434 case OpNeg64: 435 return rewriteValueMIPS64_OpNeg64_0(v) 436 case OpNeg64F: 437 return rewriteValueMIPS64_OpNeg64F_0(v) 438 case OpNeg8: 439 return rewriteValueMIPS64_OpNeg8_0(v) 440 case OpNeq16: 441 return rewriteValueMIPS64_OpNeq16_0(v) 442 case OpNeq32: 443 return rewriteValueMIPS64_OpNeq32_0(v) 444 case OpNeq32F: 445 return rewriteValueMIPS64_OpNeq32F_0(v) 446 case OpNeq64: 447 return rewriteValueMIPS64_OpNeq64_0(v) 448 case OpNeq64F: 449 return rewriteValueMIPS64_OpNeq64F_0(v) 450 case OpNeq8: 451 return rewriteValueMIPS64_OpNeq8_0(v) 452 case OpNeqB: 453 return rewriteValueMIPS64_OpNeqB_0(v) 454 case OpNeqPtr: 455 return rewriteValueMIPS64_OpNeqPtr_0(v) 456 case OpNilCheck: 457 return rewriteValueMIPS64_OpNilCheck_0(v) 458 case OpNot: 459 return rewriteValueMIPS64_OpNot_0(v) 460 case OpOffPtr: 461 return rewriteValueMIPS64_OpOffPtr_0(v) 462 case OpOr16: 463 return rewriteValueMIPS64_OpOr16_0(v) 464 case OpOr32: 465 return rewriteValueMIPS64_OpOr32_0(v) 466 case OpOr64: 467 return rewriteValueMIPS64_OpOr64_0(v) 468 case OpOr8: 469 return rewriteValueMIPS64_OpOr8_0(v) 470 case OpOrB: 471 return rewriteValueMIPS64_OpOrB_0(v) 472 case OpRound32F: 473 return rewriteValueMIPS64_OpRound32F_0(v) 474 case OpRound64F: 475 return rewriteValueMIPS64_OpRound64F_0(v) 476 case OpRsh16Ux16: 477 return rewriteValueMIPS64_OpRsh16Ux16_0(v) 478 case OpRsh16Ux32: 479 return rewriteValueMIPS64_OpRsh16Ux32_0(v) 480 case OpRsh16Ux64: 481 return rewriteValueMIPS64_OpRsh16Ux64_0(v) 482 case OpRsh16Ux8: 483 return rewriteValueMIPS64_OpRsh16Ux8_0(v) 484 case OpRsh16x16: 485 return rewriteValueMIPS64_OpRsh16x16_0(v) 486 case OpRsh16x32: 487 return rewriteValueMIPS64_OpRsh16x32_0(v) 488 case OpRsh16x64: 489 return rewriteValueMIPS64_OpRsh16x64_0(v) 490 case OpRsh16x8: 491 return rewriteValueMIPS64_OpRsh16x8_0(v) 492 case OpRsh32Ux16: 493 return rewriteValueMIPS64_OpRsh32Ux16_0(v) 494 case OpRsh32Ux32: 495 return rewriteValueMIPS64_OpRsh32Ux32_0(v) 496 case OpRsh32Ux64: 497 return rewriteValueMIPS64_OpRsh32Ux64_0(v) 498 case OpRsh32Ux8: 499 return rewriteValueMIPS64_OpRsh32Ux8_0(v) 500 case OpRsh32x16: 501 return rewriteValueMIPS64_OpRsh32x16_0(v) 502 case OpRsh32x32: 503 return rewriteValueMIPS64_OpRsh32x32_0(v) 504 case OpRsh32x64: 505 return rewriteValueMIPS64_OpRsh32x64_0(v) 506 case OpRsh32x8: 507 return rewriteValueMIPS64_OpRsh32x8_0(v) 508 case OpRsh64Ux16: 509 return rewriteValueMIPS64_OpRsh64Ux16_0(v) 510 case OpRsh64Ux32: 511 return rewriteValueMIPS64_OpRsh64Ux32_0(v) 512 case OpRsh64Ux64: 513 return rewriteValueMIPS64_OpRsh64Ux64_0(v) 514 case OpRsh64Ux8: 515 return rewriteValueMIPS64_OpRsh64Ux8_0(v) 516 case OpRsh64x16: 517 return rewriteValueMIPS64_OpRsh64x16_0(v) 518 case OpRsh64x32: 519 return rewriteValueMIPS64_OpRsh64x32_0(v) 520 case OpRsh64x64: 521 return rewriteValueMIPS64_OpRsh64x64_0(v) 522 case OpRsh64x8: 523 return rewriteValueMIPS64_OpRsh64x8_0(v) 524 case OpRsh8Ux16: 525 return rewriteValueMIPS64_OpRsh8Ux16_0(v) 526 case OpRsh8Ux32: 527 return rewriteValueMIPS64_OpRsh8Ux32_0(v) 528 case OpRsh8Ux64: 529 return rewriteValueMIPS64_OpRsh8Ux64_0(v) 530 case OpRsh8Ux8: 531 return rewriteValueMIPS64_OpRsh8Ux8_0(v) 532 case OpRsh8x16: 533 return rewriteValueMIPS64_OpRsh8x16_0(v) 534 case OpRsh8x32: 535 return rewriteValueMIPS64_OpRsh8x32_0(v) 536 case OpRsh8x64: 537 return rewriteValueMIPS64_OpRsh8x64_0(v) 538 case OpRsh8x8: 539 return rewriteValueMIPS64_OpRsh8x8_0(v) 540 case OpSelect0: 541 return rewriteValueMIPS64_OpSelect0_0(v) 542 case OpSelect1: 543 return rewriteValueMIPS64_OpSelect1_0(v) || rewriteValueMIPS64_OpSelect1_10(v) || rewriteValueMIPS64_OpSelect1_20(v) 544 case OpSignExt16to32: 545 return rewriteValueMIPS64_OpSignExt16to32_0(v) 546 case OpSignExt16to64: 547 return rewriteValueMIPS64_OpSignExt16to64_0(v) 548 case OpSignExt32to64: 549 return rewriteValueMIPS64_OpSignExt32to64_0(v) 550 case OpSignExt8to16: 551 return rewriteValueMIPS64_OpSignExt8to16_0(v) 552 case OpSignExt8to32: 553 return rewriteValueMIPS64_OpSignExt8to32_0(v) 554 case OpSignExt8to64: 555 return rewriteValueMIPS64_OpSignExt8to64_0(v) 556 case OpSlicemask: 557 return rewriteValueMIPS64_OpSlicemask_0(v) 558 case OpSqrt: 559 return rewriteValueMIPS64_OpSqrt_0(v) 560 case OpStaticCall: 561 return rewriteValueMIPS64_OpStaticCall_0(v) 562 case OpStore: 563 return rewriteValueMIPS64_OpStore_0(v) 564 case OpSub16: 565 return rewriteValueMIPS64_OpSub16_0(v) 566 case OpSub32: 567 return rewriteValueMIPS64_OpSub32_0(v) 568 case OpSub32F: 569 return rewriteValueMIPS64_OpSub32F_0(v) 570 case OpSub64: 571 return rewriteValueMIPS64_OpSub64_0(v) 572 case OpSub64F: 573 return rewriteValueMIPS64_OpSub64F_0(v) 574 case OpSub8: 575 return rewriteValueMIPS64_OpSub8_0(v) 576 case OpSubPtr: 577 return rewriteValueMIPS64_OpSubPtr_0(v) 578 case OpTrunc16to8: 579 return rewriteValueMIPS64_OpTrunc16to8_0(v) 580 case OpTrunc32to16: 581 return rewriteValueMIPS64_OpTrunc32to16_0(v) 582 case OpTrunc32to8: 583 return rewriteValueMIPS64_OpTrunc32to8_0(v) 584 case OpTrunc64to16: 585 return rewriteValueMIPS64_OpTrunc64to16_0(v) 586 case OpTrunc64to32: 587 return rewriteValueMIPS64_OpTrunc64to32_0(v) 588 case OpTrunc64to8: 589 return rewriteValueMIPS64_OpTrunc64to8_0(v) 590 case OpWB: 591 return rewriteValueMIPS64_OpWB_0(v) 592 case OpXor16: 593 return rewriteValueMIPS64_OpXor16_0(v) 594 case OpXor32: 595 return rewriteValueMIPS64_OpXor32_0(v) 596 case OpXor64: 597 return rewriteValueMIPS64_OpXor64_0(v) 598 case OpXor8: 599 return rewriteValueMIPS64_OpXor8_0(v) 600 case OpZero: 601 return rewriteValueMIPS64_OpZero_0(v) || rewriteValueMIPS64_OpZero_10(v) 602 case OpZeroExt16to32: 603 return rewriteValueMIPS64_OpZeroExt16to32_0(v) 604 case OpZeroExt16to64: 605 return rewriteValueMIPS64_OpZeroExt16to64_0(v) 606 case OpZeroExt32to64: 607 return rewriteValueMIPS64_OpZeroExt32to64_0(v) 608 case OpZeroExt8to16: 609 return rewriteValueMIPS64_OpZeroExt8to16_0(v) 610 case OpZeroExt8to32: 611 return rewriteValueMIPS64_OpZeroExt8to32_0(v) 612 case OpZeroExt8to64: 613 return rewriteValueMIPS64_OpZeroExt8to64_0(v) 614 } 615 return false 616 } 617 func rewriteValueMIPS64_OpAdd16_0(v *Value) bool { 618 // match: (Add16 x y) 619 // cond: 620 // result: (ADDV x y) 621 for { 622 _ = v.Args[1] 623 x := v.Args[0] 624 y := v.Args[1] 625 v.reset(OpMIPS64ADDV) 626 v.AddArg(x) 627 v.AddArg(y) 628 return true 629 } 630 } 631 func rewriteValueMIPS64_OpAdd32_0(v *Value) bool { 632 // match: (Add32 x y) 633 // cond: 634 // result: (ADDV x y) 635 for { 636 _ = v.Args[1] 637 x := v.Args[0] 638 y := v.Args[1] 639 v.reset(OpMIPS64ADDV) 640 v.AddArg(x) 641 v.AddArg(y) 642 return true 643 } 644 } 645 func rewriteValueMIPS64_OpAdd32F_0(v *Value) bool { 646 // match: (Add32F x y) 647 // cond: 648 // result: (ADDF x y) 649 for { 650 _ = v.Args[1] 651 x := v.Args[0] 652 y := v.Args[1] 653 v.reset(OpMIPS64ADDF) 654 v.AddArg(x) 655 v.AddArg(y) 656 return true 657 } 658 } 659 func rewriteValueMIPS64_OpAdd64_0(v *Value) bool { 660 // match: (Add64 x y) 661 // cond: 662 // result: (ADDV x y) 663 for { 664 _ = v.Args[1] 665 x := v.Args[0] 666 y := v.Args[1] 667 v.reset(OpMIPS64ADDV) 668 v.AddArg(x) 669 v.AddArg(y) 670 return true 671 } 672 } 673 func rewriteValueMIPS64_OpAdd64F_0(v *Value) bool { 674 // match: (Add64F x y) 675 // cond: 676 // result: (ADDD x y) 677 for { 678 _ = v.Args[1] 679 x := v.Args[0] 680 y := v.Args[1] 681 v.reset(OpMIPS64ADDD) 682 v.AddArg(x) 683 v.AddArg(y) 684 return true 685 } 686 } 687 func rewriteValueMIPS64_OpAdd8_0(v *Value) bool { 688 // match: (Add8 x y) 689 // cond: 690 // result: (ADDV x y) 691 for { 692 _ = v.Args[1] 693 x := v.Args[0] 694 y := v.Args[1] 695 v.reset(OpMIPS64ADDV) 696 v.AddArg(x) 697 v.AddArg(y) 698 return true 699 } 700 } 701 func rewriteValueMIPS64_OpAddPtr_0(v *Value) bool { 702 // match: (AddPtr x y) 703 // cond: 704 // result: (ADDV x y) 705 for { 706 _ = v.Args[1] 707 x := v.Args[0] 708 y := v.Args[1] 709 v.reset(OpMIPS64ADDV) 710 v.AddArg(x) 711 v.AddArg(y) 712 return true 713 } 714 } 715 func rewriteValueMIPS64_OpAddr_0(v *Value) bool { 716 // match: (Addr {sym} base) 717 // cond: 718 // result: (MOVVaddr {sym} base) 719 for { 720 sym := v.Aux 721 base := v.Args[0] 722 v.reset(OpMIPS64MOVVaddr) 723 v.Aux = sym 724 v.AddArg(base) 725 return true 726 } 727 } 728 func rewriteValueMIPS64_OpAnd16_0(v *Value) bool { 729 // match: (And16 x y) 730 // cond: 731 // result: (AND x y) 732 for { 733 _ = v.Args[1] 734 x := v.Args[0] 735 y := v.Args[1] 736 v.reset(OpMIPS64AND) 737 v.AddArg(x) 738 v.AddArg(y) 739 return true 740 } 741 } 742 func rewriteValueMIPS64_OpAnd32_0(v *Value) bool { 743 // match: (And32 x y) 744 // cond: 745 // result: (AND x y) 746 for { 747 _ = v.Args[1] 748 x := v.Args[0] 749 y := v.Args[1] 750 v.reset(OpMIPS64AND) 751 v.AddArg(x) 752 v.AddArg(y) 753 return true 754 } 755 } 756 func rewriteValueMIPS64_OpAnd64_0(v *Value) bool { 757 // match: (And64 x y) 758 // cond: 759 // result: (AND x y) 760 for { 761 _ = v.Args[1] 762 x := v.Args[0] 763 y := v.Args[1] 764 v.reset(OpMIPS64AND) 765 v.AddArg(x) 766 v.AddArg(y) 767 return true 768 } 769 } 770 func rewriteValueMIPS64_OpAnd8_0(v *Value) bool { 771 // match: (And8 x y) 772 // cond: 773 // result: (AND x y) 774 for { 775 _ = v.Args[1] 776 x := v.Args[0] 777 y := v.Args[1] 778 v.reset(OpMIPS64AND) 779 v.AddArg(x) 780 v.AddArg(y) 781 return true 782 } 783 } 784 func rewriteValueMIPS64_OpAndB_0(v *Value) bool { 785 // match: (AndB x y) 786 // cond: 787 // result: (AND x y) 788 for { 789 _ = v.Args[1] 790 x := v.Args[0] 791 y := v.Args[1] 792 v.reset(OpMIPS64AND) 793 v.AddArg(x) 794 v.AddArg(y) 795 return true 796 } 797 } 798 func rewriteValueMIPS64_OpAtomicAdd32_0(v *Value) bool { 799 // match: (AtomicAdd32 ptr val mem) 800 // cond: 801 // result: (LoweredAtomicAdd32 ptr val mem) 802 for { 803 _ = v.Args[2] 804 ptr := v.Args[0] 805 val := v.Args[1] 806 mem := v.Args[2] 807 v.reset(OpMIPS64LoweredAtomicAdd32) 808 v.AddArg(ptr) 809 v.AddArg(val) 810 v.AddArg(mem) 811 return true 812 } 813 } 814 func rewriteValueMIPS64_OpAtomicAdd64_0(v *Value) bool { 815 // match: (AtomicAdd64 ptr val mem) 816 // cond: 817 // result: (LoweredAtomicAdd64 ptr val mem) 818 for { 819 _ = v.Args[2] 820 ptr := v.Args[0] 821 val := v.Args[1] 822 mem := v.Args[2] 823 v.reset(OpMIPS64LoweredAtomicAdd64) 824 v.AddArg(ptr) 825 v.AddArg(val) 826 v.AddArg(mem) 827 return true 828 } 829 } 830 func rewriteValueMIPS64_OpAtomicCompareAndSwap32_0(v *Value) bool { 831 // match: (AtomicCompareAndSwap32 ptr old new_ mem) 832 // cond: 833 // result: (LoweredAtomicCas32 ptr old new_ mem) 834 for { 835 _ = v.Args[3] 836 ptr := v.Args[0] 837 old := v.Args[1] 838 new_ := v.Args[2] 839 mem := v.Args[3] 840 v.reset(OpMIPS64LoweredAtomicCas32) 841 v.AddArg(ptr) 842 v.AddArg(old) 843 v.AddArg(new_) 844 v.AddArg(mem) 845 return true 846 } 847 } 848 func rewriteValueMIPS64_OpAtomicCompareAndSwap64_0(v *Value) bool { 849 // match: (AtomicCompareAndSwap64 ptr old new_ mem) 850 // cond: 851 // result: (LoweredAtomicCas64 ptr old new_ mem) 852 for { 853 _ = v.Args[3] 854 ptr := v.Args[0] 855 old := v.Args[1] 856 new_ := v.Args[2] 857 mem := v.Args[3] 858 v.reset(OpMIPS64LoweredAtomicCas64) 859 v.AddArg(ptr) 860 v.AddArg(old) 861 v.AddArg(new_) 862 v.AddArg(mem) 863 return true 864 } 865 } 866 func rewriteValueMIPS64_OpAtomicExchange32_0(v *Value) bool { 867 // match: (AtomicExchange32 ptr val mem) 868 // cond: 869 // result: (LoweredAtomicExchange32 ptr val mem) 870 for { 871 _ = v.Args[2] 872 ptr := v.Args[0] 873 val := v.Args[1] 874 mem := v.Args[2] 875 v.reset(OpMIPS64LoweredAtomicExchange32) 876 v.AddArg(ptr) 877 v.AddArg(val) 878 v.AddArg(mem) 879 return true 880 } 881 } 882 func rewriteValueMIPS64_OpAtomicExchange64_0(v *Value) bool { 883 // match: (AtomicExchange64 ptr val mem) 884 // cond: 885 // result: (LoweredAtomicExchange64 ptr val mem) 886 for { 887 _ = v.Args[2] 888 ptr := v.Args[0] 889 val := v.Args[1] 890 mem := v.Args[2] 891 v.reset(OpMIPS64LoweredAtomicExchange64) 892 v.AddArg(ptr) 893 v.AddArg(val) 894 v.AddArg(mem) 895 return true 896 } 897 } 898 func rewriteValueMIPS64_OpAtomicLoad32_0(v *Value) bool { 899 // match: (AtomicLoad32 ptr mem) 900 // cond: 901 // result: (LoweredAtomicLoad32 ptr mem) 902 for { 903 _ = v.Args[1] 904 ptr := v.Args[0] 905 mem := v.Args[1] 906 v.reset(OpMIPS64LoweredAtomicLoad32) 907 v.AddArg(ptr) 908 v.AddArg(mem) 909 return true 910 } 911 } 912 func rewriteValueMIPS64_OpAtomicLoad64_0(v *Value) bool { 913 // match: (AtomicLoad64 ptr mem) 914 // cond: 915 // result: (LoweredAtomicLoad64 ptr mem) 916 for { 917 _ = v.Args[1] 918 ptr := v.Args[0] 919 mem := v.Args[1] 920 v.reset(OpMIPS64LoweredAtomicLoad64) 921 v.AddArg(ptr) 922 v.AddArg(mem) 923 return true 924 } 925 } 926 func rewriteValueMIPS64_OpAtomicLoadPtr_0(v *Value) bool { 927 // match: (AtomicLoadPtr ptr mem) 928 // cond: 929 // result: (LoweredAtomicLoad64 ptr mem) 930 for { 931 _ = v.Args[1] 932 ptr := v.Args[0] 933 mem := v.Args[1] 934 v.reset(OpMIPS64LoweredAtomicLoad64) 935 v.AddArg(ptr) 936 v.AddArg(mem) 937 return true 938 } 939 } 940 func rewriteValueMIPS64_OpAtomicStore32_0(v *Value) bool { 941 // match: (AtomicStore32 ptr val mem) 942 // cond: 943 // result: (LoweredAtomicStore32 ptr val mem) 944 for { 945 _ = v.Args[2] 946 ptr := v.Args[0] 947 val := v.Args[1] 948 mem := v.Args[2] 949 v.reset(OpMIPS64LoweredAtomicStore32) 950 v.AddArg(ptr) 951 v.AddArg(val) 952 v.AddArg(mem) 953 return true 954 } 955 } 956 func rewriteValueMIPS64_OpAtomicStore64_0(v *Value) bool { 957 // match: (AtomicStore64 ptr val mem) 958 // cond: 959 // result: (LoweredAtomicStore64 ptr val mem) 960 for { 961 _ = v.Args[2] 962 ptr := v.Args[0] 963 val := v.Args[1] 964 mem := v.Args[2] 965 v.reset(OpMIPS64LoweredAtomicStore64) 966 v.AddArg(ptr) 967 v.AddArg(val) 968 v.AddArg(mem) 969 return true 970 } 971 } 972 func rewriteValueMIPS64_OpAtomicStorePtrNoWB_0(v *Value) bool { 973 // match: (AtomicStorePtrNoWB ptr val mem) 974 // cond: 975 // result: (LoweredAtomicStore64 ptr val mem) 976 for { 977 _ = v.Args[2] 978 ptr := v.Args[0] 979 val := v.Args[1] 980 mem := v.Args[2] 981 v.reset(OpMIPS64LoweredAtomicStore64) 982 v.AddArg(ptr) 983 v.AddArg(val) 984 v.AddArg(mem) 985 return true 986 } 987 } 988 func rewriteValueMIPS64_OpAvg64u_0(v *Value) bool { 989 b := v.Block 990 _ = b 991 // match: (Avg64u <t> x y) 992 // cond: 993 // result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y) 994 for { 995 t := v.Type 996 _ = v.Args[1] 997 x := v.Args[0] 998 y := v.Args[1] 999 v.reset(OpMIPS64ADDV) 1000 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t) 1001 v0.AuxInt = 1 1002 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t) 1003 v1.AddArg(x) 1004 v1.AddArg(y) 1005 v0.AddArg(v1) 1006 v.AddArg(v0) 1007 v.AddArg(y) 1008 return true 1009 } 1010 } 1011 func rewriteValueMIPS64_OpClosureCall_0(v *Value) bool { 1012 // match: (ClosureCall [argwid] entry closure mem) 1013 // cond: 1014 // result: (CALLclosure [argwid] entry closure mem) 1015 for { 1016 argwid := v.AuxInt 1017 _ = v.Args[2] 1018 entry := v.Args[0] 1019 closure := v.Args[1] 1020 mem := v.Args[2] 1021 v.reset(OpMIPS64CALLclosure) 1022 v.AuxInt = argwid 1023 v.AddArg(entry) 1024 v.AddArg(closure) 1025 v.AddArg(mem) 1026 return true 1027 } 1028 } 1029 func rewriteValueMIPS64_OpCom16_0(v *Value) bool { 1030 b := v.Block 1031 _ = b 1032 typ := &b.Func.Config.Types 1033 _ = typ 1034 // match: (Com16 x) 1035 // cond: 1036 // result: (NOR (MOVVconst [0]) x) 1037 for { 1038 x := v.Args[0] 1039 v.reset(OpMIPS64NOR) 1040 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1041 v0.AuxInt = 0 1042 v.AddArg(v0) 1043 v.AddArg(x) 1044 return true 1045 } 1046 } 1047 func rewriteValueMIPS64_OpCom32_0(v *Value) bool { 1048 b := v.Block 1049 _ = b 1050 typ := &b.Func.Config.Types 1051 _ = typ 1052 // match: (Com32 x) 1053 // cond: 1054 // result: (NOR (MOVVconst [0]) x) 1055 for { 1056 x := v.Args[0] 1057 v.reset(OpMIPS64NOR) 1058 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1059 v0.AuxInt = 0 1060 v.AddArg(v0) 1061 v.AddArg(x) 1062 return true 1063 } 1064 } 1065 func rewriteValueMIPS64_OpCom64_0(v *Value) bool { 1066 b := v.Block 1067 _ = b 1068 typ := &b.Func.Config.Types 1069 _ = typ 1070 // match: (Com64 x) 1071 // cond: 1072 // result: (NOR (MOVVconst [0]) x) 1073 for { 1074 x := v.Args[0] 1075 v.reset(OpMIPS64NOR) 1076 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1077 v0.AuxInt = 0 1078 v.AddArg(v0) 1079 v.AddArg(x) 1080 return true 1081 } 1082 } 1083 func rewriteValueMIPS64_OpCom8_0(v *Value) bool { 1084 b := v.Block 1085 _ = b 1086 typ := &b.Func.Config.Types 1087 _ = typ 1088 // match: (Com8 x) 1089 // cond: 1090 // result: (NOR (MOVVconst [0]) x) 1091 for { 1092 x := v.Args[0] 1093 v.reset(OpMIPS64NOR) 1094 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1095 v0.AuxInt = 0 1096 v.AddArg(v0) 1097 v.AddArg(x) 1098 return true 1099 } 1100 } 1101 func rewriteValueMIPS64_OpConst16_0(v *Value) bool { 1102 // match: (Const16 [val]) 1103 // cond: 1104 // result: (MOVVconst [val]) 1105 for { 1106 val := v.AuxInt 1107 v.reset(OpMIPS64MOVVconst) 1108 v.AuxInt = val 1109 return true 1110 } 1111 } 1112 func rewriteValueMIPS64_OpConst32_0(v *Value) bool { 1113 // match: (Const32 [val]) 1114 // cond: 1115 // result: (MOVVconst [val]) 1116 for { 1117 val := v.AuxInt 1118 v.reset(OpMIPS64MOVVconst) 1119 v.AuxInt = val 1120 return true 1121 } 1122 } 1123 func rewriteValueMIPS64_OpConst32F_0(v *Value) bool { 1124 // match: (Const32F [val]) 1125 // cond: 1126 // result: (MOVFconst [val]) 1127 for { 1128 val := v.AuxInt 1129 v.reset(OpMIPS64MOVFconst) 1130 v.AuxInt = val 1131 return true 1132 } 1133 } 1134 func rewriteValueMIPS64_OpConst64_0(v *Value) bool { 1135 // match: (Const64 [val]) 1136 // cond: 1137 // result: (MOVVconst [val]) 1138 for { 1139 val := v.AuxInt 1140 v.reset(OpMIPS64MOVVconst) 1141 v.AuxInt = val 1142 return true 1143 } 1144 } 1145 func rewriteValueMIPS64_OpConst64F_0(v *Value) bool { 1146 // match: (Const64F [val]) 1147 // cond: 1148 // result: (MOVDconst [val]) 1149 for { 1150 val := v.AuxInt 1151 v.reset(OpMIPS64MOVDconst) 1152 v.AuxInt = val 1153 return true 1154 } 1155 } 1156 func rewriteValueMIPS64_OpConst8_0(v *Value) bool { 1157 // match: (Const8 [val]) 1158 // cond: 1159 // result: (MOVVconst [val]) 1160 for { 1161 val := v.AuxInt 1162 v.reset(OpMIPS64MOVVconst) 1163 v.AuxInt = val 1164 return true 1165 } 1166 } 1167 func rewriteValueMIPS64_OpConstBool_0(v *Value) bool { 1168 // match: (ConstBool [b]) 1169 // cond: 1170 // result: (MOVVconst [b]) 1171 for { 1172 b := v.AuxInt 1173 v.reset(OpMIPS64MOVVconst) 1174 v.AuxInt = b 1175 return true 1176 } 1177 } 1178 func rewriteValueMIPS64_OpConstNil_0(v *Value) bool { 1179 // match: (ConstNil) 1180 // cond: 1181 // result: (MOVVconst [0]) 1182 for { 1183 v.reset(OpMIPS64MOVVconst) 1184 v.AuxInt = 0 1185 return true 1186 } 1187 } 1188 func rewriteValueMIPS64_OpCvt32Fto32_0(v *Value) bool { 1189 // match: (Cvt32Fto32 x) 1190 // cond: 1191 // result: (TRUNCFW x) 1192 for { 1193 x := v.Args[0] 1194 v.reset(OpMIPS64TRUNCFW) 1195 v.AddArg(x) 1196 return true 1197 } 1198 } 1199 func rewriteValueMIPS64_OpCvt32Fto64_0(v *Value) bool { 1200 // match: (Cvt32Fto64 x) 1201 // cond: 1202 // result: (TRUNCFV x) 1203 for { 1204 x := v.Args[0] 1205 v.reset(OpMIPS64TRUNCFV) 1206 v.AddArg(x) 1207 return true 1208 } 1209 } 1210 func rewriteValueMIPS64_OpCvt32Fto64F_0(v *Value) bool { 1211 // match: (Cvt32Fto64F x) 1212 // cond: 1213 // result: (MOVFD x) 1214 for { 1215 x := v.Args[0] 1216 v.reset(OpMIPS64MOVFD) 1217 v.AddArg(x) 1218 return true 1219 } 1220 } 1221 func rewriteValueMIPS64_OpCvt32to32F_0(v *Value) bool { 1222 // match: (Cvt32to32F x) 1223 // cond: 1224 // result: (MOVWF x) 1225 for { 1226 x := v.Args[0] 1227 v.reset(OpMIPS64MOVWF) 1228 v.AddArg(x) 1229 return true 1230 } 1231 } 1232 func rewriteValueMIPS64_OpCvt32to64F_0(v *Value) bool { 1233 // match: (Cvt32to64F x) 1234 // cond: 1235 // result: (MOVWD x) 1236 for { 1237 x := v.Args[0] 1238 v.reset(OpMIPS64MOVWD) 1239 v.AddArg(x) 1240 return true 1241 } 1242 } 1243 func rewriteValueMIPS64_OpCvt64Fto32_0(v *Value) bool { 1244 // match: (Cvt64Fto32 x) 1245 // cond: 1246 // result: (TRUNCDW x) 1247 for { 1248 x := v.Args[0] 1249 v.reset(OpMIPS64TRUNCDW) 1250 v.AddArg(x) 1251 return true 1252 } 1253 } 1254 func rewriteValueMIPS64_OpCvt64Fto32F_0(v *Value) bool { 1255 // match: (Cvt64Fto32F x) 1256 // cond: 1257 // result: (MOVDF x) 1258 for { 1259 x := v.Args[0] 1260 v.reset(OpMIPS64MOVDF) 1261 v.AddArg(x) 1262 return true 1263 } 1264 } 1265 func rewriteValueMIPS64_OpCvt64Fto64_0(v *Value) bool { 1266 // match: (Cvt64Fto64 x) 1267 // cond: 1268 // result: (TRUNCDV x) 1269 for { 1270 x := v.Args[0] 1271 v.reset(OpMIPS64TRUNCDV) 1272 v.AddArg(x) 1273 return true 1274 } 1275 } 1276 func rewriteValueMIPS64_OpCvt64to32F_0(v *Value) bool { 1277 // match: (Cvt64to32F x) 1278 // cond: 1279 // result: (MOVVF x) 1280 for { 1281 x := v.Args[0] 1282 v.reset(OpMIPS64MOVVF) 1283 v.AddArg(x) 1284 return true 1285 } 1286 } 1287 func rewriteValueMIPS64_OpCvt64to64F_0(v *Value) bool { 1288 // match: (Cvt64to64F x) 1289 // cond: 1290 // result: (MOVVD x) 1291 for { 1292 x := v.Args[0] 1293 v.reset(OpMIPS64MOVVD) 1294 v.AddArg(x) 1295 return true 1296 } 1297 } 1298 func rewriteValueMIPS64_OpDiv16_0(v *Value) bool { 1299 b := v.Block 1300 _ = b 1301 typ := &b.Func.Config.Types 1302 _ = typ 1303 // match: (Div16 x y) 1304 // cond: 1305 // result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 1306 for { 1307 _ = v.Args[1] 1308 x := v.Args[0] 1309 y := v.Args[1] 1310 v.reset(OpSelect1) 1311 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1312 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1313 v1.AddArg(x) 1314 v0.AddArg(v1) 1315 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1316 v2.AddArg(y) 1317 v0.AddArg(v2) 1318 v.AddArg(v0) 1319 return true 1320 } 1321 } 1322 func rewriteValueMIPS64_OpDiv16u_0(v *Value) bool { 1323 b := v.Block 1324 _ = b 1325 typ := &b.Func.Config.Types 1326 _ = typ 1327 // match: (Div16u x y) 1328 // cond: 1329 // result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 1330 for { 1331 _ = v.Args[1] 1332 x := v.Args[0] 1333 y := v.Args[1] 1334 v.reset(OpSelect1) 1335 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1336 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1337 v1.AddArg(x) 1338 v0.AddArg(v1) 1339 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1340 v2.AddArg(y) 1341 v0.AddArg(v2) 1342 v.AddArg(v0) 1343 return true 1344 } 1345 } 1346 func rewriteValueMIPS64_OpDiv32_0(v *Value) bool { 1347 b := v.Block 1348 _ = b 1349 typ := &b.Func.Config.Types 1350 _ = typ 1351 // match: (Div32 x y) 1352 // cond: 1353 // result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 1354 for { 1355 _ = v.Args[1] 1356 x := v.Args[0] 1357 y := v.Args[1] 1358 v.reset(OpSelect1) 1359 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1360 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1361 v1.AddArg(x) 1362 v0.AddArg(v1) 1363 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1364 v2.AddArg(y) 1365 v0.AddArg(v2) 1366 v.AddArg(v0) 1367 return true 1368 } 1369 } 1370 func rewriteValueMIPS64_OpDiv32F_0(v *Value) bool { 1371 // match: (Div32F x y) 1372 // cond: 1373 // result: (DIVF x y) 1374 for { 1375 _ = v.Args[1] 1376 x := v.Args[0] 1377 y := v.Args[1] 1378 v.reset(OpMIPS64DIVF) 1379 v.AddArg(x) 1380 v.AddArg(y) 1381 return true 1382 } 1383 } 1384 func rewriteValueMIPS64_OpDiv32u_0(v *Value) bool { 1385 b := v.Block 1386 _ = b 1387 typ := &b.Func.Config.Types 1388 _ = typ 1389 // match: (Div32u x y) 1390 // cond: 1391 // result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 1392 for { 1393 _ = v.Args[1] 1394 x := v.Args[0] 1395 y := v.Args[1] 1396 v.reset(OpSelect1) 1397 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1398 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1399 v1.AddArg(x) 1400 v0.AddArg(v1) 1401 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1402 v2.AddArg(y) 1403 v0.AddArg(v2) 1404 v.AddArg(v0) 1405 return true 1406 } 1407 } 1408 func rewriteValueMIPS64_OpDiv64_0(v *Value) bool { 1409 b := v.Block 1410 _ = b 1411 typ := &b.Func.Config.Types 1412 _ = typ 1413 // match: (Div64 x y) 1414 // cond: 1415 // result: (Select1 (DIVV x y)) 1416 for { 1417 _ = v.Args[1] 1418 x := v.Args[0] 1419 y := v.Args[1] 1420 v.reset(OpSelect1) 1421 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1422 v0.AddArg(x) 1423 v0.AddArg(y) 1424 v.AddArg(v0) 1425 return true 1426 } 1427 } 1428 func rewriteValueMIPS64_OpDiv64F_0(v *Value) bool { 1429 // match: (Div64F x y) 1430 // cond: 1431 // result: (DIVD x y) 1432 for { 1433 _ = v.Args[1] 1434 x := v.Args[0] 1435 y := v.Args[1] 1436 v.reset(OpMIPS64DIVD) 1437 v.AddArg(x) 1438 v.AddArg(y) 1439 return true 1440 } 1441 } 1442 func rewriteValueMIPS64_OpDiv64u_0(v *Value) bool { 1443 b := v.Block 1444 _ = b 1445 typ := &b.Func.Config.Types 1446 _ = typ 1447 // match: (Div64u x y) 1448 // cond: 1449 // result: (Select1 (DIVVU x y)) 1450 for { 1451 _ = v.Args[1] 1452 x := v.Args[0] 1453 y := v.Args[1] 1454 v.reset(OpSelect1) 1455 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1456 v0.AddArg(x) 1457 v0.AddArg(y) 1458 v.AddArg(v0) 1459 return true 1460 } 1461 } 1462 func rewriteValueMIPS64_OpDiv8_0(v *Value) bool { 1463 b := v.Block 1464 _ = b 1465 typ := &b.Func.Config.Types 1466 _ = typ 1467 // match: (Div8 x y) 1468 // cond: 1469 // result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 1470 for { 1471 _ = v.Args[1] 1472 x := v.Args[0] 1473 y := v.Args[1] 1474 v.reset(OpSelect1) 1475 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1476 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1477 v1.AddArg(x) 1478 v0.AddArg(v1) 1479 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1480 v2.AddArg(y) 1481 v0.AddArg(v2) 1482 v.AddArg(v0) 1483 return true 1484 } 1485 } 1486 func rewriteValueMIPS64_OpDiv8u_0(v *Value) bool { 1487 b := v.Block 1488 _ = b 1489 typ := &b.Func.Config.Types 1490 _ = typ 1491 // match: (Div8u x y) 1492 // cond: 1493 // result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 1494 for { 1495 _ = v.Args[1] 1496 x := v.Args[0] 1497 y := v.Args[1] 1498 v.reset(OpSelect1) 1499 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1500 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1501 v1.AddArg(x) 1502 v0.AddArg(v1) 1503 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1504 v2.AddArg(y) 1505 v0.AddArg(v2) 1506 v.AddArg(v0) 1507 return true 1508 } 1509 } 1510 func rewriteValueMIPS64_OpEq16_0(v *Value) bool { 1511 b := v.Block 1512 _ = b 1513 typ := &b.Func.Config.Types 1514 _ = typ 1515 // match: (Eq16 x y) 1516 // cond: 1517 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))) 1518 for { 1519 _ = v.Args[1] 1520 x := v.Args[0] 1521 y := v.Args[1] 1522 v.reset(OpMIPS64SGTU) 1523 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1524 v0.AuxInt = 1 1525 v.AddArg(v0) 1526 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1527 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1528 v2.AddArg(x) 1529 v1.AddArg(v2) 1530 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1531 v3.AddArg(y) 1532 v1.AddArg(v3) 1533 v.AddArg(v1) 1534 return true 1535 } 1536 } 1537 func rewriteValueMIPS64_OpEq32_0(v *Value) bool { 1538 b := v.Block 1539 _ = b 1540 typ := &b.Func.Config.Types 1541 _ = typ 1542 // match: (Eq32 x y) 1543 // cond: 1544 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))) 1545 for { 1546 _ = v.Args[1] 1547 x := v.Args[0] 1548 y := v.Args[1] 1549 v.reset(OpMIPS64SGTU) 1550 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1551 v0.AuxInt = 1 1552 v.AddArg(v0) 1553 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1554 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1555 v2.AddArg(x) 1556 v1.AddArg(v2) 1557 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1558 v3.AddArg(y) 1559 v1.AddArg(v3) 1560 v.AddArg(v1) 1561 return true 1562 } 1563 } 1564 func rewriteValueMIPS64_OpEq32F_0(v *Value) bool { 1565 b := v.Block 1566 _ = b 1567 // match: (Eq32F x y) 1568 // cond: 1569 // result: (FPFlagTrue (CMPEQF x y)) 1570 for { 1571 _ = v.Args[1] 1572 x := v.Args[0] 1573 y := v.Args[1] 1574 v.reset(OpMIPS64FPFlagTrue) 1575 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags) 1576 v0.AddArg(x) 1577 v0.AddArg(y) 1578 v.AddArg(v0) 1579 return true 1580 } 1581 } 1582 func rewriteValueMIPS64_OpEq64_0(v *Value) bool { 1583 b := v.Block 1584 _ = b 1585 typ := &b.Func.Config.Types 1586 _ = typ 1587 // match: (Eq64 x y) 1588 // cond: 1589 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1590 for { 1591 _ = v.Args[1] 1592 x := v.Args[0] 1593 y := v.Args[1] 1594 v.reset(OpMIPS64SGTU) 1595 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1596 v0.AuxInt = 1 1597 v.AddArg(v0) 1598 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1599 v1.AddArg(x) 1600 v1.AddArg(y) 1601 v.AddArg(v1) 1602 return true 1603 } 1604 } 1605 func rewriteValueMIPS64_OpEq64F_0(v *Value) bool { 1606 b := v.Block 1607 _ = b 1608 // match: (Eq64F x y) 1609 // cond: 1610 // result: (FPFlagTrue (CMPEQD x y)) 1611 for { 1612 _ = v.Args[1] 1613 x := v.Args[0] 1614 y := v.Args[1] 1615 v.reset(OpMIPS64FPFlagTrue) 1616 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags) 1617 v0.AddArg(x) 1618 v0.AddArg(y) 1619 v.AddArg(v0) 1620 return true 1621 } 1622 } 1623 func rewriteValueMIPS64_OpEq8_0(v *Value) bool { 1624 b := v.Block 1625 _ = b 1626 typ := &b.Func.Config.Types 1627 _ = typ 1628 // match: (Eq8 x y) 1629 // cond: 1630 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))) 1631 for { 1632 _ = v.Args[1] 1633 x := v.Args[0] 1634 y := v.Args[1] 1635 v.reset(OpMIPS64SGTU) 1636 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1637 v0.AuxInt = 1 1638 v.AddArg(v0) 1639 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1640 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1641 v2.AddArg(x) 1642 v1.AddArg(v2) 1643 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1644 v3.AddArg(y) 1645 v1.AddArg(v3) 1646 v.AddArg(v1) 1647 return true 1648 } 1649 } 1650 func rewriteValueMIPS64_OpEqB_0(v *Value) bool { 1651 b := v.Block 1652 _ = b 1653 typ := &b.Func.Config.Types 1654 _ = typ 1655 // match: (EqB x y) 1656 // cond: 1657 // result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y)) 1658 for { 1659 _ = v.Args[1] 1660 x := v.Args[0] 1661 y := v.Args[1] 1662 v.reset(OpMIPS64XOR) 1663 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1664 v0.AuxInt = 1 1665 v.AddArg(v0) 1666 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool) 1667 v1.AddArg(x) 1668 v1.AddArg(y) 1669 v.AddArg(v1) 1670 return true 1671 } 1672 } 1673 func rewriteValueMIPS64_OpEqPtr_0(v *Value) bool { 1674 b := v.Block 1675 _ = b 1676 typ := &b.Func.Config.Types 1677 _ = typ 1678 // match: (EqPtr x y) 1679 // cond: 1680 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1681 for { 1682 _ = v.Args[1] 1683 x := v.Args[0] 1684 y := v.Args[1] 1685 v.reset(OpMIPS64SGTU) 1686 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1687 v0.AuxInt = 1 1688 v.AddArg(v0) 1689 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 1690 v1.AddArg(x) 1691 v1.AddArg(y) 1692 v.AddArg(v1) 1693 return true 1694 } 1695 } 1696 func rewriteValueMIPS64_OpGeq16_0(v *Value) bool { 1697 b := v.Block 1698 _ = b 1699 typ := &b.Func.Config.Types 1700 _ = typ 1701 // match: (Geq16 x y) 1702 // cond: 1703 // result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 y) (SignExt16to64 x))) 1704 for { 1705 _ = v.Args[1] 1706 x := v.Args[0] 1707 y := v.Args[1] 1708 v.reset(OpMIPS64XOR) 1709 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1710 v0.AuxInt = 1 1711 v.AddArg(v0) 1712 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1713 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1714 v2.AddArg(y) 1715 v1.AddArg(v2) 1716 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1717 v3.AddArg(x) 1718 v1.AddArg(v3) 1719 v.AddArg(v1) 1720 return true 1721 } 1722 } 1723 func rewriteValueMIPS64_OpGeq16U_0(v *Value) bool { 1724 b := v.Block 1725 _ = b 1726 typ := &b.Func.Config.Types 1727 _ = typ 1728 // match: (Geq16U x y) 1729 // cond: 1730 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x))) 1731 for { 1732 _ = v.Args[1] 1733 x := v.Args[0] 1734 y := v.Args[1] 1735 v.reset(OpMIPS64XOR) 1736 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1737 v0.AuxInt = 1 1738 v.AddArg(v0) 1739 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1740 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1741 v2.AddArg(y) 1742 v1.AddArg(v2) 1743 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1744 v3.AddArg(x) 1745 v1.AddArg(v3) 1746 v.AddArg(v1) 1747 return true 1748 } 1749 } 1750 func rewriteValueMIPS64_OpGeq32_0(v *Value) bool { 1751 b := v.Block 1752 _ = b 1753 typ := &b.Func.Config.Types 1754 _ = typ 1755 // match: (Geq32 x y) 1756 // cond: 1757 // result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 y) (SignExt32to64 x))) 1758 for { 1759 _ = v.Args[1] 1760 x := v.Args[0] 1761 y := v.Args[1] 1762 v.reset(OpMIPS64XOR) 1763 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1764 v0.AuxInt = 1 1765 v.AddArg(v0) 1766 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1767 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1768 v2.AddArg(y) 1769 v1.AddArg(v2) 1770 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1771 v3.AddArg(x) 1772 v1.AddArg(v3) 1773 v.AddArg(v1) 1774 return true 1775 } 1776 } 1777 func rewriteValueMIPS64_OpGeq32F_0(v *Value) bool { 1778 b := v.Block 1779 _ = b 1780 // match: (Geq32F x y) 1781 // cond: 1782 // result: (FPFlagTrue (CMPGEF x y)) 1783 for { 1784 _ = v.Args[1] 1785 x := v.Args[0] 1786 y := v.Args[1] 1787 v.reset(OpMIPS64FPFlagTrue) 1788 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags) 1789 v0.AddArg(x) 1790 v0.AddArg(y) 1791 v.AddArg(v0) 1792 return true 1793 } 1794 } 1795 func rewriteValueMIPS64_OpGeq32U_0(v *Value) bool { 1796 b := v.Block 1797 _ = b 1798 typ := &b.Func.Config.Types 1799 _ = typ 1800 // match: (Geq32U x y) 1801 // cond: 1802 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x))) 1803 for { 1804 _ = v.Args[1] 1805 x := v.Args[0] 1806 y := v.Args[1] 1807 v.reset(OpMIPS64XOR) 1808 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1809 v0.AuxInt = 1 1810 v.AddArg(v0) 1811 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1812 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1813 v2.AddArg(y) 1814 v1.AddArg(v2) 1815 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1816 v3.AddArg(x) 1817 v1.AddArg(v3) 1818 v.AddArg(v1) 1819 return true 1820 } 1821 } 1822 func rewriteValueMIPS64_OpGeq64_0(v *Value) bool { 1823 b := v.Block 1824 _ = b 1825 typ := &b.Func.Config.Types 1826 _ = typ 1827 // match: (Geq64 x y) 1828 // cond: 1829 // result: (XOR (MOVVconst [1]) (SGT y x)) 1830 for { 1831 _ = v.Args[1] 1832 x := v.Args[0] 1833 y := v.Args[1] 1834 v.reset(OpMIPS64XOR) 1835 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1836 v0.AuxInt = 1 1837 v.AddArg(v0) 1838 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1839 v1.AddArg(y) 1840 v1.AddArg(x) 1841 v.AddArg(v1) 1842 return true 1843 } 1844 } 1845 func rewriteValueMIPS64_OpGeq64F_0(v *Value) bool { 1846 b := v.Block 1847 _ = b 1848 // match: (Geq64F x y) 1849 // cond: 1850 // result: (FPFlagTrue (CMPGED x y)) 1851 for { 1852 _ = v.Args[1] 1853 x := v.Args[0] 1854 y := v.Args[1] 1855 v.reset(OpMIPS64FPFlagTrue) 1856 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags) 1857 v0.AddArg(x) 1858 v0.AddArg(y) 1859 v.AddArg(v0) 1860 return true 1861 } 1862 } 1863 func rewriteValueMIPS64_OpGeq64U_0(v *Value) bool { 1864 b := v.Block 1865 _ = b 1866 typ := &b.Func.Config.Types 1867 _ = typ 1868 // match: (Geq64U x y) 1869 // cond: 1870 // result: (XOR (MOVVconst [1]) (SGTU y x)) 1871 for { 1872 _ = v.Args[1] 1873 x := v.Args[0] 1874 y := v.Args[1] 1875 v.reset(OpMIPS64XOR) 1876 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1877 v0.AuxInt = 1 1878 v.AddArg(v0) 1879 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1880 v1.AddArg(y) 1881 v1.AddArg(x) 1882 v.AddArg(v1) 1883 return true 1884 } 1885 } 1886 func rewriteValueMIPS64_OpGeq8_0(v *Value) bool { 1887 b := v.Block 1888 _ = b 1889 typ := &b.Func.Config.Types 1890 _ = typ 1891 // match: (Geq8 x y) 1892 // cond: 1893 // result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 y) (SignExt8to64 x))) 1894 for { 1895 _ = v.Args[1] 1896 x := v.Args[0] 1897 y := v.Args[1] 1898 v.reset(OpMIPS64XOR) 1899 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1900 v0.AuxInt = 1 1901 v.AddArg(v0) 1902 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 1903 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1904 v2.AddArg(y) 1905 v1.AddArg(v2) 1906 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1907 v3.AddArg(x) 1908 v1.AddArg(v3) 1909 v.AddArg(v1) 1910 return true 1911 } 1912 } 1913 func rewriteValueMIPS64_OpGeq8U_0(v *Value) bool { 1914 b := v.Block 1915 _ = b 1916 typ := &b.Func.Config.Types 1917 _ = typ 1918 // match: (Geq8U x y) 1919 // cond: 1920 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x))) 1921 for { 1922 _ = v.Args[1] 1923 x := v.Args[0] 1924 y := v.Args[1] 1925 v.reset(OpMIPS64XOR) 1926 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 1927 v0.AuxInt = 1 1928 v.AddArg(v0) 1929 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 1930 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1931 v2.AddArg(y) 1932 v1.AddArg(v2) 1933 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1934 v3.AddArg(x) 1935 v1.AddArg(v3) 1936 v.AddArg(v1) 1937 return true 1938 } 1939 } 1940 func rewriteValueMIPS64_OpGetCallerPC_0(v *Value) bool { 1941 // match: (GetCallerPC) 1942 // cond: 1943 // result: (LoweredGetCallerPC) 1944 for { 1945 v.reset(OpMIPS64LoweredGetCallerPC) 1946 return true 1947 } 1948 } 1949 func rewriteValueMIPS64_OpGetCallerSP_0(v *Value) bool { 1950 // match: (GetCallerSP) 1951 // cond: 1952 // result: (LoweredGetCallerSP) 1953 for { 1954 v.reset(OpMIPS64LoweredGetCallerSP) 1955 return true 1956 } 1957 } 1958 func rewriteValueMIPS64_OpGetClosurePtr_0(v *Value) bool { 1959 // match: (GetClosurePtr) 1960 // cond: 1961 // result: (LoweredGetClosurePtr) 1962 for { 1963 v.reset(OpMIPS64LoweredGetClosurePtr) 1964 return true 1965 } 1966 } 1967 func rewriteValueMIPS64_OpGreater16_0(v *Value) bool { 1968 b := v.Block 1969 _ = b 1970 typ := &b.Func.Config.Types 1971 _ = typ 1972 // match: (Greater16 x y) 1973 // cond: 1974 // result: (SGT (SignExt16to64 x) (SignExt16to64 y)) 1975 for { 1976 _ = v.Args[1] 1977 x := v.Args[0] 1978 y := v.Args[1] 1979 v.reset(OpMIPS64SGT) 1980 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1981 v0.AddArg(x) 1982 v.AddArg(v0) 1983 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 1984 v1.AddArg(y) 1985 v.AddArg(v1) 1986 return true 1987 } 1988 } 1989 func rewriteValueMIPS64_OpGreater16U_0(v *Value) bool { 1990 b := v.Block 1991 _ = b 1992 typ := &b.Func.Config.Types 1993 _ = typ 1994 // match: (Greater16U x y) 1995 // cond: 1996 // result: (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y)) 1997 for { 1998 _ = v.Args[1] 1999 x := v.Args[0] 2000 y := v.Args[1] 2001 v.reset(OpMIPS64SGTU) 2002 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2003 v0.AddArg(x) 2004 v.AddArg(v0) 2005 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2006 v1.AddArg(y) 2007 v.AddArg(v1) 2008 return true 2009 } 2010 } 2011 func rewriteValueMIPS64_OpGreater32_0(v *Value) bool { 2012 b := v.Block 2013 _ = b 2014 typ := &b.Func.Config.Types 2015 _ = typ 2016 // match: (Greater32 x y) 2017 // cond: 2018 // result: (SGT (SignExt32to64 x) (SignExt32to64 y)) 2019 for { 2020 _ = v.Args[1] 2021 x := v.Args[0] 2022 y := v.Args[1] 2023 v.reset(OpMIPS64SGT) 2024 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2025 v0.AddArg(x) 2026 v.AddArg(v0) 2027 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2028 v1.AddArg(y) 2029 v.AddArg(v1) 2030 return true 2031 } 2032 } 2033 func rewriteValueMIPS64_OpGreater32F_0(v *Value) bool { 2034 b := v.Block 2035 _ = b 2036 // match: (Greater32F x y) 2037 // cond: 2038 // result: (FPFlagTrue (CMPGTF x y)) 2039 for { 2040 _ = v.Args[1] 2041 x := v.Args[0] 2042 y := v.Args[1] 2043 v.reset(OpMIPS64FPFlagTrue) 2044 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags) 2045 v0.AddArg(x) 2046 v0.AddArg(y) 2047 v.AddArg(v0) 2048 return true 2049 } 2050 } 2051 func rewriteValueMIPS64_OpGreater32U_0(v *Value) bool { 2052 b := v.Block 2053 _ = b 2054 typ := &b.Func.Config.Types 2055 _ = typ 2056 // match: (Greater32U x y) 2057 // cond: 2058 // result: (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y)) 2059 for { 2060 _ = v.Args[1] 2061 x := v.Args[0] 2062 y := v.Args[1] 2063 v.reset(OpMIPS64SGTU) 2064 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2065 v0.AddArg(x) 2066 v.AddArg(v0) 2067 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2068 v1.AddArg(y) 2069 v.AddArg(v1) 2070 return true 2071 } 2072 } 2073 func rewriteValueMIPS64_OpGreater64_0(v *Value) bool { 2074 // match: (Greater64 x y) 2075 // cond: 2076 // result: (SGT x y) 2077 for { 2078 _ = v.Args[1] 2079 x := v.Args[0] 2080 y := v.Args[1] 2081 v.reset(OpMIPS64SGT) 2082 v.AddArg(x) 2083 v.AddArg(y) 2084 return true 2085 } 2086 } 2087 func rewriteValueMIPS64_OpGreater64F_0(v *Value) bool { 2088 b := v.Block 2089 _ = b 2090 // match: (Greater64F x y) 2091 // cond: 2092 // result: (FPFlagTrue (CMPGTD x y)) 2093 for { 2094 _ = v.Args[1] 2095 x := v.Args[0] 2096 y := v.Args[1] 2097 v.reset(OpMIPS64FPFlagTrue) 2098 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags) 2099 v0.AddArg(x) 2100 v0.AddArg(y) 2101 v.AddArg(v0) 2102 return true 2103 } 2104 } 2105 func rewriteValueMIPS64_OpGreater64U_0(v *Value) bool { 2106 // match: (Greater64U x y) 2107 // cond: 2108 // result: (SGTU x y) 2109 for { 2110 _ = v.Args[1] 2111 x := v.Args[0] 2112 y := v.Args[1] 2113 v.reset(OpMIPS64SGTU) 2114 v.AddArg(x) 2115 v.AddArg(y) 2116 return true 2117 } 2118 } 2119 func rewriteValueMIPS64_OpGreater8_0(v *Value) bool { 2120 b := v.Block 2121 _ = b 2122 typ := &b.Func.Config.Types 2123 _ = typ 2124 // match: (Greater8 x y) 2125 // cond: 2126 // result: (SGT (SignExt8to64 x) (SignExt8to64 y)) 2127 for { 2128 _ = v.Args[1] 2129 x := v.Args[0] 2130 y := v.Args[1] 2131 v.reset(OpMIPS64SGT) 2132 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2133 v0.AddArg(x) 2134 v.AddArg(v0) 2135 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2136 v1.AddArg(y) 2137 v.AddArg(v1) 2138 return true 2139 } 2140 } 2141 func rewriteValueMIPS64_OpGreater8U_0(v *Value) bool { 2142 b := v.Block 2143 _ = b 2144 typ := &b.Func.Config.Types 2145 _ = typ 2146 // match: (Greater8U x y) 2147 // cond: 2148 // result: (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y)) 2149 for { 2150 _ = v.Args[1] 2151 x := v.Args[0] 2152 y := v.Args[1] 2153 v.reset(OpMIPS64SGTU) 2154 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2155 v0.AddArg(x) 2156 v.AddArg(v0) 2157 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2158 v1.AddArg(y) 2159 v.AddArg(v1) 2160 return true 2161 } 2162 } 2163 func rewriteValueMIPS64_OpHmul32_0(v *Value) bool { 2164 b := v.Block 2165 _ = b 2166 typ := &b.Func.Config.Types 2167 _ = typ 2168 // match: (Hmul32 x y) 2169 // cond: 2170 // result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32]) 2171 for { 2172 _ = v.Args[1] 2173 x := v.Args[0] 2174 y := v.Args[1] 2175 v.reset(OpMIPS64SRAVconst) 2176 v.AuxInt = 32 2177 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64) 2178 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64)) 2179 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2180 v2.AddArg(x) 2181 v1.AddArg(v2) 2182 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2183 v3.AddArg(y) 2184 v1.AddArg(v3) 2185 v0.AddArg(v1) 2186 v.AddArg(v0) 2187 return true 2188 } 2189 } 2190 func rewriteValueMIPS64_OpHmul32u_0(v *Value) bool { 2191 b := v.Block 2192 _ = b 2193 typ := &b.Func.Config.Types 2194 _ = typ 2195 // match: (Hmul32u x y) 2196 // cond: 2197 // result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32]) 2198 for { 2199 _ = v.Args[1] 2200 x := v.Args[0] 2201 y := v.Args[1] 2202 v.reset(OpMIPS64SRLVconst) 2203 v.AuxInt = 32 2204 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 2205 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 2206 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2207 v2.AddArg(x) 2208 v1.AddArg(v2) 2209 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2210 v3.AddArg(y) 2211 v1.AddArg(v3) 2212 v0.AddArg(v1) 2213 v.AddArg(v0) 2214 return true 2215 } 2216 } 2217 func rewriteValueMIPS64_OpHmul64_0(v *Value) bool { 2218 b := v.Block 2219 _ = b 2220 typ := &b.Func.Config.Types 2221 _ = typ 2222 // match: (Hmul64 x y) 2223 // cond: 2224 // result: (Select0 (MULV x y)) 2225 for { 2226 _ = v.Args[1] 2227 x := v.Args[0] 2228 y := v.Args[1] 2229 v.reset(OpSelect0) 2230 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64)) 2231 v0.AddArg(x) 2232 v0.AddArg(y) 2233 v.AddArg(v0) 2234 return true 2235 } 2236 } 2237 func rewriteValueMIPS64_OpHmul64u_0(v *Value) bool { 2238 b := v.Block 2239 _ = b 2240 typ := &b.Func.Config.Types 2241 _ = typ 2242 // match: (Hmul64u x y) 2243 // cond: 2244 // result: (Select0 (MULVU x y)) 2245 for { 2246 _ = v.Args[1] 2247 x := v.Args[0] 2248 y := v.Args[1] 2249 v.reset(OpSelect0) 2250 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 2251 v0.AddArg(x) 2252 v0.AddArg(y) 2253 v.AddArg(v0) 2254 return true 2255 } 2256 } 2257 func rewriteValueMIPS64_OpInterCall_0(v *Value) bool { 2258 // match: (InterCall [argwid] entry mem) 2259 // cond: 2260 // result: (CALLinter [argwid] entry mem) 2261 for { 2262 argwid := v.AuxInt 2263 _ = v.Args[1] 2264 entry := v.Args[0] 2265 mem := v.Args[1] 2266 v.reset(OpMIPS64CALLinter) 2267 v.AuxInt = argwid 2268 v.AddArg(entry) 2269 v.AddArg(mem) 2270 return true 2271 } 2272 } 2273 func rewriteValueMIPS64_OpIsInBounds_0(v *Value) bool { 2274 // match: (IsInBounds idx len) 2275 // cond: 2276 // result: (SGTU len idx) 2277 for { 2278 _ = v.Args[1] 2279 idx := v.Args[0] 2280 len := v.Args[1] 2281 v.reset(OpMIPS64SGTU) 2282 v.AddArg(len) 2283 v.AddArg(idx) 2284 return true 2285 } 2286 } 2287 func rewriteValueMIPS64_OpIsNonNil_0(v *Value) bool { 2288 b := v.Block 2289 _ = b 2290 typ := &b.Func.Config.Types 2291 _ = typ 2292 // match: (IsNonNil ptr) 2293 // cond: 2294 // result: (SGTU ptr (MOVVconst [0])) 2295 for { 2296 ptr := v.Args[0] 2297 v.reset(OpMIPS64SGTU) 2298 v.AddArg(ptr) 2299 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2300 v0.AuxInt = 0 2301 v.AddArg(v0) 2302 return true 2303 } 2304 } 2305 func rewriteValueMIPS64_OpIsSliceInBounds_0(v *Value) bool { 2306 b := v.Block 2307 _ = b 2308 typ := &b.Func.Config.Types 2309 _ = typ 2310 // match: (IsSliceInBounds idx len) 2311 // cond: 2312 // result: (XOR (MOVVconst [1]) (SGTU idx len)) 2313 for { 2314 _ = v.Args[1] 2315 idx := v.Args[0] 2316 len := v.Args[1] 2317 v.reset(OpMIPS64XOR) 2318 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2319 v0.AuxInt = 1 2320 v.AddArg(v0) 2321 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2322 v1.AddArg(idx) 2323 v1.AddArg(len) 2324 v.AddArg(v1) 2325 return true 2326 } 2327 } 2328 func rewriteValueMIPS64_OpLeq16_0(v *Value) bool { 2329 b := v.Block 2330 _ = b 2331 typ := &b.Func.Config.Types 2332 _ = typ 2333 // match: (Leq16 x y) 2334 // cond: 2335 // result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y))) 2336 for { 2337 _ = v.Args[1] 2338 x := v.Args[0] 2339 y := v.Args[1] 2340 v.reset(OpMIPS64XOR) 2341 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2342 v0.AuxInt = 1 2343 v.AddArg(v0) 2344 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2345 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2346 v2.AddArg(x) 2347 v1.AddArg(v2) 2348 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2349 v3.AddArg(y) 2350 v1.AddArg(v3) 2351 v.AddArg(v1) 2352 return true 2353 } 2354 } 2355 func rewriteValueMIPS64_OpLeq16U_0(v *Value) bool { 2356 b := v.Block 2357 _ = b 2358 typ := &b.Func.Config.Types 2359 _ = typ 2360 // match: (Leq16U x y) 2361 // cond: 2362 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))) 2363 for { 2364 _ = v.Args[1] 2365 x := v.Args[0] 2366 y := v.Args[1] 2367 v.reset(OpMIPS64XOR) 2368 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2369 v0.AuxInt = 1 2370 v.AddArg(v0) 2371 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2372 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2373 v2.AddArg(x) 2374 v1.AddArg(v2) 2375 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2376 v3.AddArg(y) 2377 v1.AddArg(v3) 2378 v.AddArg(v1) 2379 return true 2380 } 2381 } 2382 func rewriteValueMIPS64_OpLeq32_0(v *Value) bool { 2383 b := v.Block 2384 _ = b 2385 typ := &b.Func.Config.Types 2386 _ = typ 2387 // match: (Leq32 x y) 2388 // cond: 2389 // result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y))) 2390 for { 2391 _ = v.Args[1] 2392 x := v.Args[0] 2393 y := v.Args[1] 2394 v.reset(OpMIPS64XOR) 2395 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2396 v0.AuxInt = 1 2397 v.AddArg(v0) 2398 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2399 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2400 v2.AddArg(x) 2401 v1.AddArg(v2) 2402 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2403 v3.AddArg(y) 2404 v1.AddArg(v3) 2405 v.AddArg(v1) 2406 return true 2407 } 2408 } 2409 func rewriteValueMIPS64_OpLeq32F_0(v *Value) bool { 2410 b := v.Block 2411 _ = b 2412 // match: (Leq32F x y) 2413 // cond: 2414 // result: (FPFlagTrue (CMPGEF y x)) 2415 for { 2416 _ = v.Args[1] 2417 x := v.Args[0] 2418 y := v.Args[1] 2419 v.reset(OpMIPS64FPFlagTrue) 2420 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags) 2421 v0.AddArg(y) 2422 v0.AddArg(x) 2423 v.AddArg(v0) 2424 return true 2425 } 2426 } 2427 func rewriteValueMIPS64_OpLeq32U_0(v *Value) bool { 2428 b := v.Block 2429 _ = b 2430 typ := &b.Func.Config.Types 2431 _ = typ 2432 // match: (Leq32U x y) 2433 // cond: 2434 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))) 2435 for { 2436 _ = v.Args[1] 2437 x := v.Args[0] 2438 y := v.Args[1] 2439 v.reset(OpMIPS64XOR) 2440 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2441 v0.AuxInt = 1 2442 v.AddArg(v0) 2443 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2444 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2445 v2.AddArg(x) 2446 v1.AddArg(v2) 2447 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2448 v3.AddArg(y) 2449 v1.AddArg(v3) 2450 v.AddArg(v1) 2451 return true 2452 } 2453 } 2454 func rewriteValueMIPS64_OpLeq64_0(v *Value) bool { 2455 b := v.Block 2456 _ = b 2457 typ := &b.Func.Config.Types 2458 _ = typ 2459 // match: (Leq64 x y) 2460 // cond: 2461 // result: (XOR (MOVVconst [1]) (SGT x y)) 2462 for { 2463 _ = v.Args[1] 2464 x := v.Args[0] 2465 y := v.Args[1] 2466 v.reset(OpMIPS64XOR) 2467 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2468 v0.AuxInt = 1 2469 v.AddArg(v0) 2470 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2471 v1.AddArg(x) 2472 v1.AddArg(y) 2473 v.AddArg(v1) 2474 return true 2475 } 2476 } 2477 func rewriteValueMIPS64_OpLeq64F_0(v *Value) bool { 2478 b := v.Block 2479 _ = b 2480 // match: (Leq64F x y) 2481 // cond: 2482 // result: (FPFlagTrue (CMPGED y x)) 2483 for { 2484 _ = v.Args[1] 2485 x := v.Args[0] 2486 y := v.Args[1] 2487 v.reset(OpMIPS64FPFlagTrue) 2488 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags) 2489 v0.AddArg(y) 2490 v0.AddArg(x) 2491 v.AddArg(v0) 2492 return true 2493 } 2494 } 2495 func rewriteValueMIPS64_OpLeq64U_0(v *Value) bool { 2496 b := v.Block 2497 _ = b 2498 typ := &b.Func.Config.Types 2499 _ = typ 2500 // match: (Leq64U x y) 2501 // cond: 2502 // result: (XOR (MOVVconst [1]) (SGTU x y)) 2503 for { 2504 _ = v.Args[1] 2505 x := v.Args[0] 2506 y := v.Args[1] 2507 v.reset(OpMIPS64XOR) 2508 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2509 v0.AuxInt = 1 2510 v.AddArg(v0) 2511 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2512 v1.AddArg(x) 2513 v1.AddArg(y) 2514 v.AddArg(v1) 2515 return true 2516 } 2517 } 2518 func rewriteValueMIPS64_OpLeq8_0(v *Value) bool { 2519 b := v.Block 2520 _ = b 2521 typ := &b.Func.Config.Types 2522 _ = typ 2523 // match: (Leq8 x y) 2524 // cond: 2525 // result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y))) 2526 for { 2527 _ = v.Args[1] 2528 x := v.Args[0] 2529 y := v.Args[1] 2530 v.reset(OpMIPS64XOR) 2531 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2532 v0.AuxInt = 1 2533 v.AddArg(v0) 2534 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool) 2535 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2536 v2.AddArg(x) 2537 v1.AddArg(v2) 2538 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2539 v3.AddArg(y) 2540 v1.AddArg(v3) 2541 v.AddArg(v1) 2542 return true 2543 } 2544 } 2545 func rewriteValueMIPS64_OpLeq8U_0(v *Value) bool { 2546 b := v.Block 2547 _ = b 2548 typ := &b.Func.Config.Types 2549 _ = typ 2550 // match: (Leq8U x y) 2551 // cond: 2552 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))) 2553 for { 2554 _ = v.Args[1] 2555 x := v.Args[0] 2556 y := v.Args[1] 2557 v.reset(OpMIPS64XOR) 2558 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2559 v0.AuxInt = 1 2560 v.AddArg(v0) 2561 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2562 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2563 v2.AddArg(x) 2564 v1.AddArg(v2) 2565 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2566 v3.AddArg(y) 2567 v1.AddArg(v3) 2568 v.AddArg(v1) 2569 return true 2570 } 2571 } 2572 func rewriteValueMIPS64_OpLess16_0(v *Value) bool { 2573 b := v.Block 2574 _ = b 2575 typ := &b.Func.Config.Types 2576 _ = typ 2577 // match: (Less16 x y) 2578 // cond: 2579 // result: (SGT (SignExt16to64 y) (SignExt16to64 x)) 2580 for { 2581 _ = v.Args[1] 2582 x := v.Args[0] 2583 y := v.Args[1] 2584 v.reset(OpMIPS64SGT) 2585 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2586 v0.AddArg(y) 2587 v.AddArg(v0) 2588 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 2589 v1.AddArg(x) 2590 v.AddArg(v1) 2591 return true 2592 } 2593 } 2594 func rewriteValueMIPS64_OpLess16U_0(v *Value) bool { 2595 b := v.Block 2596 _ = b 2597 typ := &b.Func.Config.Types 2598 _ = typ 2599 // match: (Less16U x y) 2600 // cond: 2601 // result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)) 2602 for { 2603 _ = v.Args[1] 2604 x := v.Args[0] 2605 y := v.Args[1] 2606 v.reset(OpMIPS64SGTU) 2607 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2608 v0.AddArg(y) 2609 v.AddArg(v0) 2610 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2611 v1.AddArg(x) 2612 v.AddArg(v1) 2613 return true 2614 } 2615 } 2616 func rewriteValueMIPS64_OpLess32_0(v *Value) bool { 2617 b := v.Block 2618 _ = b 2619 typ := &b.Func.Config.Types 2620 _ = typ 2621 // match: (Less32 x y) 2622 // cond: 2623 // result: (SGT (SignExt32to64 y) (SignExt32to64 x)) 2624 for { 2625 _ = v.Args[1] 2626 x := v.Args[0] 2627 y := v.Args[1] 2628 v.reset(OpMIPS64SGT) 2629 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2630 v0.AddArg(y) 2631 v.AddArg(v0) 2632 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 2633 v1.AddArg(x) 2634 v.AddArg(v1) 2635 return true 2636 } 2637 } 2638 func rewriteValueMIPS64_OpLess32F_0(v *Value) bool { 2639 b := v.Block 2640 _ = b 2641 // match: (Less32F x y) 2642 // cond: 2643 // result: (FPFlagTrue (CMPGTF y x)) 2644 for { 2645 _ = v.Args[1] 2646 x := v.Args[0] 2647 y := v.Args[1] 2648 v.reset(OpMIPS64FPFlagTrue) 2649 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags) 2650 v0.AddArg(y) 2651 v0.AddArg(x) 2652 v.AddArg(v0) 2653 return true 2654 } 2655 } 2656 func rewriteValueMIPS64_OpLess32U_0(v *Value) bool { 2657 b := v.Block 2658 _ = b 2659 typ := &b.Func.Config.Types 2660 _ = typ 2661 // match: (Less32U x y) 2662 // cond: 2663 // result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)) 2664 for { 2665 _ = v.Args[1] 2666 x := v.Args[0] 2667 y := v.Args[1] 2668 v.reset(OpMIPS64SGTU) 2669 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2670 v0.AddArg(y) 2671 v.AddArg(v0) 2672 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2673 v1.AddArg(x) 2674 v.AddArg(v1) 2675 return true 2676 } 2677 } 2678 func rewriteValueMIPS64_OpLess64_0(v *Value) bool { 2679 // match: (Less64 x y) 2680 // cond: 2681 // result: (SGT y x) 2682 for { 2683 _ = v.Args[1] 2684 x := v.Args[0] 2685 y := v.Args[1] 2686 v.reset(OpMIPS64SGT) 2687 v.AddArg(y) 2688 v.AddArg(x) 2689 return true 2690 } 2691 } 2692 func rewriteValueMIPS64_OpLess64F_0(v *Value) bool { 2693 b := v.Block 2694 _ = b 2695 // match: (Less64F x y) 2696 // cond: 2697 // result: (FPFlagTrue (CMPGTD y x)) 2698 for { 2699 _ = v.Args[1] 2700 x := v.Args[0] 2701 y := v.Args[1] 2702 v.reset(OpMIPS64FPFlagTrue) 2703 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags) 2704 v0.AddArg(y) 2705 v0.AddArg(x) 2706 v.AddArg(v0) 2707 return true 2708 } 2709 } 2710 func rewriteValueMIPS64_OpLess64U_0(v *Value) bool { 2711 // match: (Less64U x y) 2712 // cond: 2713 // result: (SGTU y x) 2714 for { 2715 _ = v.Args[1] 2716 x := v.Args[0] 2717 y := v.Args[1] 2718 v.reset(OpMIPS64SGTU) 2719 v.AddArg(y) 2720 v.AddArg(x) 2721 return true 2722 } 2723 } 2724 func rewriteValueMIPS64_OpLess8_0(v *Value) bool { 2725 b := v.Block 2726 _ = b 2727 typ := &b.Func.Config.Types 2728 _ = typ 2729 // match: (Less8 x y) 2730 // cond: 2731 // result: (SGT (SignExt8to64 y) (SignExt8to64 x)) 2732 for { 2733 _ = v.Args[1] 2734 x := v.Args[0] 2735 y := v.Args[1] 2736 v.reset(OpMIPS64SGT) 2737 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2738 v0.AddArg(y) 2739 v.AddArg(v0) 2740 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 2741 v1.AddArg(x) 2742 v.AddArg(v1) 2743 return true 2744 } 2745 } 2746 func rewriteValueMIPS64_OpLess8U_0(v *Value) bool { 2747 b := v.Block 2748 _ = b 2749 typ := &b.Func.Config.Types 2750 _ = typ 2751 // match: (Less8U x y) 2752 // cond: 2753 // result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)) 2754 for { 2755 _ = v.Args[1] 2756 x := v.Args[0] 2757 y := v.Args[1] 2758 v.reset(OpMIPS64SGTU) 2759 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2760 v0.AddArg(y) 2761 v.AddArg(v0) 2762 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 2763 v1.AddArg(x) 2764 v.AddArg(v1) 2765 return true 2766 } 2767 } 2768 func rewriteValueMIPS64_OpLoad_0(v *Value) bool { 2769 // match: (Load <t> ptr mem) 2770 // cond: t.IsBoolean() 2771 // result: (MOVBUload ptr mem) 2772 for { 2773 t := v.Type 2774 _ = v.Args[1] 2775 ptr := v.Args[0] 2776 mem := v.Args[1] 2777 if !(t.IsBoolean()) { 2778 break 2779 } 2780 v.reset(OpMIPS64MOVBUload) 2781 v.AddArg(ptr) 2782 v.AddArg(mem) 2783 return true 2784 } 2785 // match: (Load <t> ptr mem) 2786 // cond: (is8BitInt(t) && isSigned(t)) 2787 // result: (MOVBload ptr mem) 2788 for { 2789 t := v.Type 2790 _ = v.Args[1] 2791 ptr := v.Args[0] 2792 mem := v.Args[1] 2793 if !(is8BitInt(t) && isSigned(t)) { 2794 break 2795 } 2796 v.reset(OpMIPS64MOVBload) 2797 v.AddArg(ptr) 2798 v.AddArg(mem) 2799 return true 2800 } 2801 // match: (Load <t> ptr mem) 2802 // cond: (is8BitInt(t) && !isSigned(t)) 2803 // result: (MOVBUload ptr mem) 2804 for { 2805 t := v.Type 2806 _ = v.Args[1] 2807 ptr := v.Args[0] 2808 mem := v.Args[1] 2809 if !(is8BitInt(t) && !isSigned(t)) { 2810 break 2811 } 2812 v.reset(OpMIPS64MOVBUload) 2813 v.AddArg(ptr) 2814 v.AddArg(mem) 2815 return true 2816 } 2817 // match: (Load <t> ptr mem) 2818 // cond: (is16BitInt(t) && isSigned(t)) 2819 // result: (MOVHload ptr mem) 2820 for { 2821 t := v.Type 2822 _ = v.Args[1] 2823 ptr := v.Args[0] 2824 mem := v.Args[1] 2825 if !(is16BitInt(t) && isSigned(t)) { 2826 break 2827 } 2828 v.reset(OpMIPS64MOVHload) 2829 v.AddArg(ptr) 2830 v.AddArg(mem) 2831 return true 2832 } 2833 // match: (Load <t> ptr mem) 2834 // cond: (is16BitInt(t) && !isSigned(t)) 2835 // result: (MOVHUload ptr mem) 2836 for { 2837 t := v.Type 2838 _ = v.Args[1] 2839 ptr := v.Args[0] 2840 mem := v.Args[1] 2841 if !(is16BitInt(t) && !isSigned(t)) { 2842 break 2843 } 2844 v.reset(OpMIPS64MOVHUload) 2845 v.AddArg(ptr) 2846 v.AddArg(mem) 2847 return true 2848 } 2849 // match: (Load <t> ptr mem) 2850 // cond: (is32BitInt(t) && isSigned(t)) 2851 // result: (MOVWload ptr mem) 2852 for { 2853 t := v.Type 2854 _ = v.Args[1] 2855 ptr := v.Args[0] 2856 mem := v.Args[1] 2857 if !(is32BitInt(t) && isSigned(t)) { 2858 break 2859 } 2860 v.reset(OpMIPS64MOVWload) 2861 v.AddArg(ptr) 2862 v.AddArg(mem) 2863 return true 2864 } 2865 // match: (Load <t> ptr mem) 2866 // cond: (is32BitInt(t) && !isSigned(t)) 2867 // result: (MOVWUload ptr mem) 2868 for { 2869 t := v.Type 2870 _ = v.Args[1] 2871 ptr := v.Args[0] 2872 mem := v.Args[1] 2873 if !(is32BitInt(t) && !isSigned(t)) { 2874 break 2875 } 2876 v.reset(OpMIPS64MOVWUload) 2877 v.AddArg(ptr) 2878 v.AddArg(mem) 2879 return true 2880 } 2881 // match: (Load <t> ptr mem) 2882 // cond: (is64BitInt(t) || isPtr(t)) 2883 // result: (MOVVload ptr mem) 2884 for { 2885 t := v.Type 2886 _ = v.Args[1] 2887 ptr := v.Args[0] 2888 mem := v.Args[1] 2889 if !(is64BitInt(t) || isPtr(t)) { 2890 break 2891 } 2892 v.reset(OpMIPS64MOVVload) 2893 v.AddArg(ptr) 2894 v.AddArg(mem) 2895 return true 2896 } 2897 // match: (Load <t> ptr mem) 2898 // cond: is32BitFloat(t) 2899 // result: (MOVFload ptr mem) 2900 for { 2901 t := v.Type 2902 _ = v.Args[1] 2903 ptr := v.Args[0] 2904 mem := v.Args[1] 2905 if !(is32BitFloat(t)) { 2906 break 2907 } 2908 v.reset(OpMIPS64MOVFload) 2909 v.AddArg(ptr) 2910 v.AddArg(mem) 2911 return true 2912 } 2913 // match: (Load <t> ptr mem) 2914 // cond: is64BitFloat(t) 2915 // result: (MOVDload ptr mem) 2916 for { 2917 t := v.Type 2918 _ = v.Args[1] 2919 ptr := v.Args[0] 2920 mem := v.Args[1] 2921 if !(is64BitFloat(t)) { 2922 break 2923 } 2924 v.reset(OpMIPS64MOVDload) 2925 v.AddArg(ptr) 2926 v.AddArg(mem) 2927 return true 2928 } 2929 return false 2930 } 2931 func rewriteValueMIPS64_OpLocalAddr_0(v *Value) bool { 2932 // match: (LocalAddr {sym} base _) 2933 // cond: 2934 // result: (MOVVaddr {sym} base) 2935 for { 2936 sym := v.Aux 2937 _ = v.Args[1] 2938 base := v.Args[0] 2939 v.reset(OpMIPS64MOVVaddr) 2940 v.Aux = sym 2941 v.AddArg(base) 2942 return true 2943 } 2944 } 2945 func rewriteValueMIPS64_OpLsh16x16_0(v *Value) bool { 2946 b := v.Block 2947 _ = b 2948 typ := &b.Func.Config.Types 2949 _ = typ 2950 // match: (Lsh16x16 <t> x y) 2951 // cond: 2952 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 2953 for { 2954 t := v.Type 2955 _ = v.Args[1] 2956 x := v.Args[0] 2957 y := v.Args[1] 2958 v.reset(OpMIPS64AND) 2959 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2960 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2961 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2962 v2.AuxInt = 64 2963 v1.AddArg(v2) 2964 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2965 v3.AddArg(y) 2966 v1.AddArg(v3) 2967 v0.AddArg(v1) 2968 v.AddArg(v0) 2969 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 2970 v4.AddArg(x) 2971 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 2972 v5.AddArg(y) 2973 v4.AddArg(v5) 2974 v.AddArg(v4) 2975 return true 2976 } 2977 } 2978 func rewriteValueMIPS64_OpLsh16x32_0(v *Value) bool { 2979 b := v.Block 2980 _ = b 2981 typ := &b.Func.Config.Types 2982 _ = typ 2983 // match: (Lsh16x32 <t> x y) 2984 // cond: 2985 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 2986 for { 2987 t := v.Type 2988 _ = v.Args[1] 2989 x := v.Args[0] 2990 y := v.Args[1] 2991 v.reset(OpMIPS64AND) 2992 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 2993 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 2994 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 2995 v2.AuxInt = 64 2996 v1.AddArg(v2) 2997 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 2998 v3.AddArg(y) 2999 v1.AddArg(v3) 3000 v0.AddArg(v1) 3001 v.AddArg(v0) 3002 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3003 v4.AddArg(x) 3004 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3005 v5.AddArg(y) 3006 v4.AddArg(v5) 3007 v.AddArg(v4) 3008 return true 3009 } 3010 } 3011 func rewriteValueMIPS64_OpLsh16x64_0(v *Value) bool { 3012 b := v.Block 3013 _ = b 3014 typ := &b.Func.Config.Types 3015 _ = typ 3016 // match: (Lsh16x64 <t> x y) 3017 // cond: 3018 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3019 for { 3020 t := v.Type 3021 _ = v.Args[1] 3022 x := v.Args[0] 3023 y := v.Args[1] 3024 v.reset(OpMIPS64AND) 3025 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3026 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3027 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3028 v2.AuxInt = 64 3029 v1.AddArg(v2) 3030 v1.AddArg(y) 3031 v0.AddArg(v1) 3032 v.AddArg(v0) 3033 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3034 v3.AddArg(x) 3035 v3.AddArg(y) 3036 v.AddArg(v3) 3037 return true 3038 } 3039 } 3040 func rewriteValueMIPS64_OpLsh16x8_0(v *Value) bool { 3041 b := v.Block 3042 _ = b 3043 typ := &b.Func.Config.Types 3044 _ = typ 3045 // match: (Lsh16x8 <t> x y) 3046 // cond: 3047 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3048 for { 3049 t := v.Type 3050 _ = v.Args[1] 3051 x := v.Args[0] 3052 y := v.Args[1] 3053 v.reset(OpMIPS64AND) 3054 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3055 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3056 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3057 v2.AuxInt = 64 3058 v1.AddArg(v2) 3059 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3060 v3.AddArg(y) 3061 v1.AddArg(v3) 3062 v0.AddArg(v1) 3063 v.AddArg(v0) 3064 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3065 v4.AddArg(x) 3066 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3067 v5.AddArg(y) 3068 v4.AddArg(v5) 3069 v.AddArg(v4) 3070 return true 3071 } 3072 } 3073 func rewriteValueMIPS64_OpLsh32x16_0(v *Value) bool { 3074 b := v.Block 3075 _ = b 3076 typ := &b.Func.Config.Types 3077 _ = typ 3078 // match: (Lsh32x16 <t> x y) 3079 // cond: 3080 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3081 for { 3082 t := v.Type 3083 _ = v.Args[1] 3084 x := v.Args[0] 3085 y := v.Args[1] 3086 v.reset(OpMIPS64AND) 3087 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3088 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3089 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3090 v2.AuxInt = 64 3091 v1.AddArg(v2) 3092 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3093 v3.AddArg(y) 3094 v1.AddArg(v3) 3095 v0.AddArg(v1) 3096 v.AddArg(v0) 3097 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3098 v4.AddArg(x) 3099 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3100 v5.AddArg(y) 3101 v4.AddArg(v5) 3102 v.AddArg(v4) 3103 return true 3104 } 3105 } 3106 func rewriteValueMIPS64_OpLsh32x32_0(v *Value) bool { 3107 b := v.Block 3108 _ = b 3109 typ := &b.Func.Config.Types 3110 _ = typ 3111 // match: (Lsh32x32 <t> x y) 3112 // cond: 3113 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3114 for { 3115 t := v.Type 3116 _ = v.Args[1] 3117 x := v.Args[0] 3118 y := v.Args[1] 3119 v.reset(OpMIPS64AND) 3120 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3121 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3122 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3123 v2.AuxInt = 64 3124 v1.AddArg(v2) 3125 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3126 v3.AddArg(y) 3127 v1.AddArg(v3) 3128 v0.AddArg(v1) 3129 v.AddArg(v0) 3130 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3131 v4.AddArg(x) 3132 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3133 v5.AddArg(y) 3134 v4.AddArg(v5) 3135 v.AddArg(v4) 3136 return true 3137 } 3138 } 3139 func rewriteValueMIPS64_OpLsh32x64_0(v *Value) bool { 3140 b := v.Block 3141 _ = b 3142 typ := &b.Func.Config.Types 3143 _ = typ 3144 // match: (Lsh32x64 <t> x y) 3145 // cond: 3146 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3147 for { 3148 t := v.Type 3149 _ = v.Args[1] 3150 x := v.Args[0] 3151 y := v.Args[1] 3152 v.reset(OpMIPS64AND) 3153 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3154 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3155 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3156 v2.AuxInt = 64 3157 v1.AddArg(v2) 3158 v1.AddArg(y) 3159 v0.AddArg(v1) 3160 v.AddArg(v0) 3161 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3162 v3.AddArg(x) 3163 v3.AddArg(y) 3164 v.AddArg(v3) 3165 return true 3166 } 3167 } 3168 func rewriteValueMIPS64_OpLsh32x8_0(v *Value) bool { 3169 b := v.Block 3170 _ = b 3171 typ := &b.Func.Config.Types 3172 _ = typ 3173 // match: (Lsh32x8 <t> x y) 3174 // cond: 3175 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3176 for { 3177 t := v.Type 3178 _ = v.Args[1] 3179 x := v.Args[0] 3180 y := v.Args[1] 3181 v.reset(OpMIPS64AND) 3182 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3183 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3184 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3185 v2.AuxInt = 64 3186 v1.AddArg(v2) 3187 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3188 v3.AddArg(y) 3189 v1.AddArg(v3) 3190 v0.AddArg(v1) 3191 v.AddArg(v0) 3192 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3193 v4.AddArg(x) 3194 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3195 v5.AddArg(y) 3196 v4.AddArg(v5) 3197 v.AddArg(v4) 3198 return true 3199 } 3200 } 3201 func rewriteValueMIPS64_OpLsh64x16_0(v *Value) bool { 3202 b := v.Block 3203 _ = b 3204 typ := &b.Func.Config.Types 3205 _ = typ 3206 // match: (Lsh64x16 <t> x y) 3207 // cond: 3208 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3209 for { 3210 t := v.Type 3211 _ = v.Args[1] 3212 x := v.Args[0] 3213 y := v.Args[1] 3214 v.reset(OpMIPS64AND) 3215 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3216 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3217 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3218 v2.AuxInt = 64 3219 v1.AddArg(v2) 3220 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3221 v3.AddArg(y) 3222 v1.AddArg(v3) 3223 v0.AddArg(v1) 3224 v.AddArg(v0) 3225 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3226 v4.AddArg(x) 3227 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3228 v5.AddArg(y) 3229 v4.AddArg(v5) 3230 v.AddArg(v4) 3231 return true 3232 } 3233 } 3234 func rewriteValueMIPS64_OpLsh64x32_0(v *Value) bool { 3235 b := v.Block 3236 _ = b 3237 typ := &b.Func.Config.Types 3238 _ = typ 3239 // match: (Lsh64x32 <t> x y) 3240 // cond: 3241 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3242 for { 3243 t := v.Type 3244 _ = v.Args[1] 3245 x := v.Args[0] 3246 y := v.Args[1] 3247 v.reset(OpMIPS64AND) 3248 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3249 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3250 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3251 v2.AuxInt = 64 3252 v1.AddArg(v2) 3253 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3254 v3.AddArg(y) 3255 v1.AddArg(v3) 3256 v0.AddArg(v1) 3257 v.AddArg(v0) 3258 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3259 v4.AddArg(x) 3260 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3261 v5.AddArg(y) 3262 v4.AddArg(v5) 3263 v.AddArg(v4) 3264 return true 3265 } 3266 } 3267 func rewriteValueMIPS64_OpLsh64x64_0(v *Value) bool { 3268 b := v.Block 3269 _ = b 3270 typ := &b.Func.Config.Types 3271 _ = typ 3272 // match: (Lsh64x64 <t> x y) 3273 // cond: 3274 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3275 for { 3276 t := v.Type 3277 _ = v.Args[1] 3278 x := v.Args[0] 3279 y := v.Args[1] 3280 v.reset(OpMIPS64AND) 3281 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3282 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3283 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3284 v2.AuxInt = 64 3285 v1.AddArg(v2) 3286 v1.AddArg(y) 3287 v0.AddArg(v1) 3288 v.AddArg(v0) 3289 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3290 v3.AddArg(x) 3291 v3.AddArg(y) 3292 v.AddArg(v3) 3293 return true 3294 } 3295 } 3296 func rewriteValueMIPS64_OpLsh64x8_0(v *Value) bool { 3297 b := v.Block 3298 _ = b 3299 typ := &b.Func.Config.Types 3300 _ = typ 3301 // match: (Lsh64x8 <t> x y) 3302 // cond: 3303 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3304 for { 3305 t := v.Type 3306 _ = v.Args[1] 3307 x := v.Args[0] 3308 y := v.Args[1] 3309 v.reset(OpMIPS64AND) 3310 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3311 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3312 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3313 v2.AuxInt = 64 3314 v1.AddArg(v2) 3315 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3316 v3.AddArg(y) 3317 v1.AddArg(v3) 3318 v0.AddArg(v1) 3319 v.AddArg(v0) 3320 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3321 v4.AddArg(x) 3322 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3323 v5.AddArg(y) 3324 v4.AddArg(v5) 3325 v.AddArg(v4) 3326 return true 3327 } 3328 } 3329 func rewriteValueMIPS64_OpLsh8x16_0(v *Value) bool { 3330 b := v.Block 3331 _ = b 3332 typ := &b.Func.Config.Types 3333 _ = typ 3334 // match: (Lsh8x16 <t> x y) 3335 // cond: 3336 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 3337 for { 3338 t := v.Type 3339 _ = v.Args[1] 3340 x := v.Args[0] 3341 y := v.Args[1] 3342 v.reset(OpMIPS64AND) 3343 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3344 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3345 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3346 v2.AuxInt = 64 3347 v1.AddArg(v2) 3348 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3349 v3.AddArg(y) 3350 v1.AddArg(v3) 3351 v0.AddArg(v1) 3352 v.AddArg(v0) 3353 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3354 v4.AddArg(x) 3355 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 3356 v5.AddArg(y) 3357 v4.AddArg(v5) 3358 v.AddArg(v4) 3359 return true 3360 } 3361 } 3362 func rewriteValueMIPS64_OpLsh8x32_0(v *Value) bool { 3363 b := v.Block 3364 _ = b 3365 typ := &b.Func.Config.Types 3366 _ = typ 3367 // match: (Lsh8x32 <t> x y) 3368 // cond: 3369 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 3370 for { 3371 t := v.Type 3372 _ = v.Args[1] 3373 x := v.Args[0] 3374 y := v.Args[1] 3375 v.reset(OpMIPS64AND) 3376 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3377 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3378 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3379 v2.AuxInt = 64 3380 v1.AddArg(v2) 3381 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3382 v3.AddArg(y) 3383 v1.AddArg(v3) 3384 v0.AddArg(v1) 3385 v.AddArg(v0) 3386 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3387 v4.AddArg(x) 3388 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 3389 v5.AddArg(y) 3390 v4.AddArg(v5) 3391 v.AddArg(v4) 3392 return true 3393 } 3394 } 3395 func rewriteValueMIPS64_OpLsh8x64_0(v *Value) bool { 3396 b := v.Block 3397 _ = b 3398 typ := &b.Func.Config.Types 3399 _ = typ 3400 // match: (Lsh8x64 <t> x y) 3401 // cond: 3402 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 3403 for { 3404 t := v.Type 3405 _ = v.Args[1] 3406 x := v.Args[0] 3407 y := v.Args[1] 3408 v.reset(OpMIPS64AND) 3409 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3410 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3411 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3412 v2.AuxInt = 64 3413 v1.AddArg(v2) 3414 v1.AddArg(y) 3415 v0.AddArg(v1) 3416 v.AddArg(v0) 3417 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3418 v3.AddArg(x) 3419 v3.AddArg(y) 3420 v.AddArg(v3) 3421 return true 3422 } 3423 } 3424 func rewriteValueMIPS64_OpLsh8x8_0(v *Value) bool { 3425 b := v.Block 3426 _ = b 3427 typ := &b.Func.Config.Types 3428 _ = typ 3429 // match: (Lsh8x8 <t> x y) 3430 // cond: 3431 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 3432 for { 3433 t := v.Type 3434 _ = v.Args[1] 3435 x := v.Args[0] 3436 y := v.Args[1] 3437 v.reset(OpMIPS64AND) 3438 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 3439 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 3440 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 3441 v2.AuxInt = 64 3442 v1.AddArg(v2) 3443 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3444 v3.AddArg(y) 3445 v1.AddArg(v3) 3446 v0.AddArg(v1) 3447 v.AddArg(v0) 3448 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t) 3449 v4.AddArg(x) 3450 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 3451 v5.AddArg(y) 3452 v4.AddArg(v5) 3453 v.AddArg(v4) 3454 return true 3455 } 3456 } 3457 func rewriteValueMIPS64_OpMIPS64ADDV_0(v *Value) bool { 3458 // match: (ADDV x (MOVVconst [c])) 3459 // cond: is32Bit(c) 3460 // result: (ADDVconst [c] x) 3461 for { 3462 _ = v.Args[1] 3463 x := v.Args[0] 3464 v_1 := v.Args[1] 3465 if v_1.Op != OpMIPS64MOVVconst { 3466 break 3467 } 3468 c := v_1.AuxInt 3469 if !(is32Bit(c)) { 3470 break 3471 } 3472 v.reset(OpMIPS64ADDVconst) 3473 v.AuxInt = c 3474 v.AddArg(x) 3475 return true 3476 } 3477 // match: (ADDV (MOVVconst [c]) x) 3478 // cond: is32Bit(c) 3479 // result: (ADDVconst [c] x) 3480 for { 3481 _ = v.Args[1] 3482 v_0 := v.Args[0] 3483 if v_0.Op != OpMIPS64MOVVconst { 3484 break 3485 } 3486 c := v_0.AuxInt 3487 x := v.Args[1] 3488 if !(is32Bit(c)) { 3489 break 3490 } 3491 v.reset(OpMIPS64ADDVconst) 3492 v.AuxInt = c 3493 v.AddArg(x) 3494 return true 3495 } 3496 // match: (ADDV x (NEGV y)) 3497 // cond: 3498 // result: (SUBV x y) 3499 for { 3500 _ = v.Args[1] 3501 x := v.Args[0] 3502 v_1 := v.Args[1] 3503 if v_1.Op != OpMIPS64NEGV { 3504 break 3505 } 3506 y := v_1.Args[0] 3507 v.reset(OpMIPS64SUBV) 3508 v.AddArg(x) 3509 v.AddArg(y) 3510 return true 3511 } 3512 // match: (ADDV (NEGV y) x) 3513 // cond: 3514 // result: (SUBV x y) 3515 for { 3516 _ = v.Args[1] 3517 v_0 := v.Args[0] 3518 if v_0.Op != OpMIPS64NEGV { 3519 break 3520 } 3521 y := v_0.Args[0] 3522 x := v.Args[1] 3523 v.reset(OpMIPS64SUBV) 3524 v.AddArg(x) 3525 v.AddArg(y) 3526 return true 3527 } 3528 return false 3529 } 3530 func rewriteValueMIPS64_OpMIPS64ADDVconst_0(v *Value) bool { 3531 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr)) 3532 // cond: 3533 // result: (MOVVaddr [off1+off2] {sym} ptr) 3534 for { 3535 off1 := v.AuxInt 3536 v_0 := v.Args[0] 3537 if v_0.Op != OpMIPS64MOVVaddr { 3538 break 3539 } 3540 off2 := v_0.AuxInt 3541 sym := v_0.Aux 3542 ptr := v_0.Args[0] 3543 v.reset(OpMIPS64MOVVaddr) 3544 v.AuxInt = off1 + off2 3545 v.Aux = sym 3546 v.AddArg(ptr) 3547 return true 3548 } 3549 // match: (ADDVconst [0] x) 3550 // cond: 3551 // result: x 3552 for { 3553 if v.AuxInt != 0 { 3554 break 3555 } 3556 x := v.Args[0] 3557 v.reset(OpCopy) 3558 v.Type = x.Type 3559 v.AddArg(x) 3560 return true 3561 } 3562 // match: (ADDVconst [c] (MOVVconst [d])) 3563 // cond: 3564 // result: (MOVVconst [c+d]) 3565 for { 3566 c := v.AuxInt 3567 v_0 := v.Args[0] 3568 if v_0.Op != OpMIPS64MOVVconst { 3569 break 3570 } 3571 d := v_0.AuxInt 3572 v.reset(OpMIPS64MOVVconst) 3573 v.AuxInt = c + d 3574 return true 3575 } 3576 // match: (ADDVconst [c] (ADDVconst [d] x)) 3577 // cond: is32Bit(c+d) 3578 // result: (ADDVconst [c+d] x) 3579 for { 3580 c := v.AuxInt 3581 v_0 := v.Args[0] 3582 if v_0.Op != OpMIPS64ADDVconst { 3583 break 3584 } 3585 d := v_0.AuxInt 3586 x := v_0.Args[0] 3587 if !(is32Bit(c + d)) { 3588 break 3589 } 3590 v.reset(OpMIPS64ADDVconst) 3591 v.AuxInt = c + d 3592 v.AddArg(x) 3593 return true 3594 } 3595 // match: (ADDVconst [c] (SUBVconst [d] x)) 3596 // cond: is32Bit(c-d) 3597 // result: (ADDVconst [c-d] x) 3598 for { 3599 c := v.AuxInt 3600 v_0 := v.Args[0] 3601 if v_0.Op != OpMIPS64SUBVconst { 3602 break 3603 } 3604 d := v_0.AuxInt 3605 x := v_0.Args[0] 3606 if !(is32Bit(c - d)) { 3607 break 3608 } 3609 v.reset(OpMIPS64ADDVconst) 3610 v.AuxInt = c - d 3611 v.AddArg(x) 3612 return true 3613 } 3614 return false 3615 } 3616 func rewriteValueMIPS64_OpMIPS64AND_0(v *Value) bool { 3617 // match: (AND x (MOVVconst [c])) 3618 // cond: is32Bit(c) 3619 // result: (ANDconst [c] x) 3620 for { 3621 _ = v.Args[1] 3622 x := v.Args[0] 3623 v_1 := v.Args[1] 3624 if v_1.Op != OpMIPS64MOVVconst { 3625 break 3626 } 3627 c := v_1.AuxInt 3628 if !(is32Bit(c)) { 3629 break 3630 } 3631 v.reset(OpMIPS64ANDconst) 3632 v.AuxInt = c 3633 v.AddArg(x) 3634 return true 3635 } 3636 // match: (AND (MOVVconst [c]) x) 3637 // cond: is32Bit(c) 3638 // result: (ANDconst [c] x) 3639 for { 3640 _ = v.Args[1] 3641 v_0 := v.Args[0] 3642 if v_0.Op != OpMIPS64MOVVconst { 3643 break 3644 } 3645 c := v_0.AuxInt 3646 x := v.Args[1] 3647 if !(is32Bit(c)) { 3648 break 3649 } 3650 v.reset(OpMIPS64ANDconst) 3651 v.AuxInt = c 3652 v.AddArg(x) 3653 return true 3654 } 3655 // match: (AND x x) 3656 // cond: 3657 // result: x 3658 for { 3659 _ = v.Args[1] 3660 x := v.Args[0] 3661 if x != v.Args[1] { 3662 break 3663 } 3664 v.reset(OpCopy) 3665 v.Type = x.Type 3666 v.AddArg(x) 3667 return true 3668 } 3669 return false 3670 } 3671 func rewriteValueMIPS64_OpMIPS64ANDconst_0(v *Value) bool { 3672 // match: (ANDconst [0] _) 3673 // cond: 3674 // result: (MOVVconst [0]) 3675 for { 3676 if v.AuxInt != 0 { 3677 break 3678 } 3679 v.reset(OpMIPS64MOVVconst) 3680 v.AuxInt = 0 3681 return true 3682 } 3683 // match: (ANDconst [-1] x) 3684 // cond: 3685 // result: x 3686 for { 3687 if v.AuxInt != -1 { 3688 break 3689 } 3690 x := v.Args[0] 3691 v.reset(OpCopy) 3692 v.Type = x.Type 3693 v.AddArg(x) 3694 return true 3695 } 3696 // match: (ANDconst [c] (MOVVconst [d])) 3697 // cond: 3698 // result: (MOVVconst [c&d]) 3699 for { 3700 c := v.AuxInt 3701 v_0 := v.Args[0] 3702 if v_0.Op != OpMIPS64MOVVconst { 3703 break 3704 } 3705 d := v_0.AuxInt 3706 v.reset(OpMIPS64MOVVconst) 3707 v.AuxInt = c & d 3708 return true 3709 } 3710 // match: (ANDconst [c] (ANDconst [d] x)) 3711 // cond: 3712 // result: (ANDconst [c&d] x) 3713 for { 3714 c := v.AuxInt 3715 v_0 := v.Args[0] 3716 if v_0.Op != OpMIPS64ANDconst { 3717 break 3718 } 3719 d := v_0.AuxInt 3720 x := v_0.Args[0] 3721 v.reset(OpMIPS64ANDconst) 3722 v.AuxInt = c & d 3723 v.AddArg(x) 3724 return true 3725 } 3726 return false 3727 } 3728 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32_0(v *Value) bool { 3729 // match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem) 3730 // cond: is32Bit(c) 3731 // result: (LoweredAtomicAddconst32 [c] ptr mem) 3732 for { 3733 _ = v.Args[2] 3734 ptr := v.Args[0] 3735 v_1 := v.Args[1] 3736 if v_1.Op != OpMIPS64MOVVconst { 3737 break 3738 } 3739 c := v_1.AuxInt 3740 mem := v.Args[2] 3741 if !(is32Bit(c)) { 3742 break 3743 } 3744 v.reset(OpMIPS64LoweredAtomicAddconst32) 3745 v.AuxInt = c 3746 v.AddArg(ptr) 3747 v.AddArg(mem) 3748 return true 3749 } 3750 return false 3751 } 3752 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64_0(v *Value) bool { 3753 // match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem) 3754 // cond: is32Bit(c) 3755 // result: (LoweredAtomicAddconst64 [c] ptr mem) 3756 for { 3757 _ = v.Args[2] 3758 ptr := v.Args[0] 3759 v_1 := v.Args[1] 3760 if v_1.Op != OpMIPS64MOVVconst { 3761 break 3762 } 3763 c := v_1.AuxInt 3764 mem := v.Args[2] 3765 if !(is32Bit(c)) { 3766 break 3767 } 3768 v.reset(OpMIPS64LoweredAtomicAddconst64) 3769 v.AuxInt = c 3770 v.AddArg(ptr) 3771 v.AddArg(mem) 3772 return true 3773 } 3774 return false 3775 } 3776 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32_0(v *Value) bool { 3777 // match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem) 3778 // cond: 3779 // result: (LoweredAtomicStorezero32 ptr mem) 3780 for { 3781 _ = v.Args[2] 3782 ptr := v.Args[0] 3783 v_1 := v.Args[1] 3784 if v_1.Op != OpMIPS64MOVVconst { 3785 break 3786 } 3787 if v_1.AuxInt != 0 { 3788 break 3789 } 3790 mem := v.Args[2] 3791 v.reset(OpMIPS64LoweredAtomicStorezero32) 3792 v.AddArg(ptr) 3793 v.AddArg(mem) 3794 return true 3795 } 3796 return false 3797 } 3798 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64_0(v *Value) bool { 3799 // match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem) 3800 // cond: 3801 // result: (LoweredAtomicStorezero64 ptr mem) 3802 for { 3803 _ = v.Args[2] 3804 ptr := v.Args[0] 3805 v_1 := v.Args[1] 3806 if v_1.Op != OpMIPS64MOVVconst { 3807 break 3808 } 3809 if v_1.AuxInt != 0 { 3810 break 3811 } 3812 mem := v.Args[2] 3813 v.reset(OpMIPS64LoweredAtomicStorezero64) 3814 v.AddArg(ptr) 3815 v.AddArg(mem) 3816 return true 3817 } 3818 return false 3819 } 3820 func rewriteValueMIPS64_OpMIPS64MOVBUload_0(v *Value) bool { 3821 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem) 3822 // cond: is32Bit(off1+off2) 3823 // result: (MOVBUload [off1+off2] {sym} ptr mem) 3824 for { 3825 off1 := v.AuxInt 3826 sym := v.Aux 3827 _ = v.Args[1] 3828 v_0 := v.Args[0] 3829 if v_0.Op != OpMIPS64ADDVconst { 3830 break 3831 } 3832 off2 := v_0.AuxInt 3833 ptr := v_0.Args[0] 3834 mem := v.Args[1] 3835 if !(is32Bit(off1 + off2)) { 3836 break 3837 } 3838 v.reset(OpMIPS64MOVBUload) 3839 v.AuxInt = off1 + off2 3840 v.Aux = sym 3841 v.AddArg(ptr) 3842 v.AddArg(mem) 3843 return true 3844 } 3845 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3846 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3847 // result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3848 for { 3849 off1 := v.AuxInt 3850 sym1 := v.Aux 3851 _ = v.Args[1] 3852 v_0 := v.Args[0] 3853 if v_0.Op != OpMIPS64MOVVaddr { 3854 break 3855 } 3856 off2 := v_0.AuxInt 3857 sym2 := v_0.Aux 3858 ptr := v_0.Args[0] 3859 mem := v.Args[1] 3860 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3861 break 3862 } 3863 v.reset(OpMIPS64MOVBUload) 3864 v.AuxInt = off1 + off2 3865 v.Aux = mergeSym(sym1, sym2) 3866 v.AddArg(ptr) 3867 v.AddArg(mem) 3868 return true 3869 } 3870 return false 3871 } 3872 func rewriteValueMIPS64_OpMIPS64MOVBUreg_0(v *Value) bool { 3873 // match: (MOVBUreg x:(MOVBUload _ _)) 3874 // cond: 3875 // result: (MOVVreg x) 3876 for { 3877 x := v.Args[0] 3878 if x.Op != OpMIPS64MOVBUload { 3879 break 3880 } 3881 _ = x.Args[1] 3882 v.reset(OpMIPS64MOVVreg) 3883 v.AddArg(x) 3884 return true 3885 } 3886 // match: (MOVBUreg x:(MOVBUreg _)) 3887 // cond: 3888 // result: (MOVVreg x) 3889 for { 3890 x := v.Args[0] 3891 if x.Op != OpMIPS64MOVBUreg { 3892 break 3893 } 3894 v.reset(OpMIPS64MOVVreg) 3895 v.AddArg(x) 3896 return true 3897 } 3898 // match: (MOVBUreg (MOVVconst [c])) 3899 // cond: 3900 // result: (MOVVconst [int64(uint8(c))]) 3901 for { 3902 v_0 := v.Args[0] 3903 if v_0.Op != OpMIPS64MOVVconst { 3904 break 3905 } 3906 c := v_0.AuxInt 3907 v.reset(OpMIPS64MOVVconst) 3908 v.AuxInt = int64(uint8(c)) 3909 return true 3910 } 3911 return false 3912 } 3913 func rewriteValueMIPS64_OpMIPS64MOVBload_0(v *Value) bool { 3914 // match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem) 3915 // cond: is32Bit(off1+off2) 3916 // result: (MOVBload [off1+off2] {sym} ptr mem) 3917 for { 3918 off1 := v.AuxInt 3919 sym := v.Aux 3920 _ = v.Args[1] 3921 v_0 := v.Args[0] 3922 if v_0.Op != OpMIPS64ADDVconst { 3923 break 3924 } 3925 off2 := v_0.AuxInt 3926 ptr := v_0.Args[0] 3927 mem := v.Args[1] 3928 if !(is32Bit(off1 + off2)) { 3929 break 3930 } 3931 v.reset(OpMIPS64MOVBload) 3932 v.AuxInt = off1 + off2 3933 v.Aux = sym 3934 v.AddArg(ptr) 3935 v.AddArg(mem) 3936 return true 3937 } 3938 // match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3939 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 3940 // result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 3941 for { 3942 off1 := v.AuxInt 3943 sym1 := v.Aux 3944 _ = v.Args[1] 3945 v_0 := v.Args[0] 3946 if v_0.Op != OpMIPS64MOVVaddr { 3947 break 3948 } 3949 off2 := v_0.AuxInt 3950 sym2 := v_0.Aux 3951 ptr := v_0.Args[0] 3952 mem := v.Args[1] 3953 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 3954 break 3955 } 3956 v.reset(OpMIPS64MOVBload) 3957 v.AuxInt = off1 + off2 3958 v.Aux = mergeSym(sym1, sym2) 3959 v.AddArg(ptr) 3960 v.AddArg(mem) 3961 return true 3962 } 3963 return false 3964 } 3965 func rewriteValueMIPS64_OpMIPS64MOVBreg_0(v *Value) bool { 3966 // match: (MOVBreg x:(MOVBload _ _)) 3967 // cond: 3968 // result: (MOVVreg x) 3969 for { 3970 x := v.Args[0] 3971 if x.Op != OpMIPS64MOVBload { 3972 break 3973 } 3974 _ = x.Args[1] 3975 v.reset(OpMIPS64MOVVreg) 3976 v.AddArg(x) 3977 return true 3978 } 3979 // match: (MOVBreg x:(MOVBreg _)) 3980 // cond: 3981 // result: (MOVVreg x) 3982 for { 3983 x := v.Args[0] 3984 if x.Op != OpMIPS64MOVBreg { 3985 break 3986 } 3987 v.reset(OpMIPS64MOVVreg) 3988 v.AddArg(x) 3989 return true 3990 } 3991 // match: (MOVBreg (MOVVconst [c])) 3992 // cond: 3993 // result: (MOVVconst [int64(int8(c))]) 3994 for { 3995 v_0 := v.Args[0] 3996 if v_0.Op != OpMIPS64MOVVconst { 3997 break 3998 } 3999 c := v_0.AuxInt 4000 v.reset(OpMIPS64MOVVconst) 4001 v.AuxInt = int64(int8(c)) 4002 return true 4003 } 4004 return false 4005 } 4006 func rewriteValueMIPS64_OpMIPS64MOVBstore_0(v *Value) bool { 4007 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4008 // cond: is32Bit(off1+off2) 4009 // result: (MOVBstore [off1+off2] {sym} ptr val mem) 4010 for { 4011 off1 := v.AuxInt 4012 sym := v.Aux 4013 _ = v.Args[2] 4014 v_0 := v.Args[0] 4015 if v_0.Op != OpMIPS64ADDVconst { 4016 break 4017 } 4018 off2 := v_0.AuxInt 4019 ptr := v_0.Args[0] 4020 val := v.Args[1] 4021 mem := v.Args[2] 4022 if !(is32Bit(off1 + off2)) { 4023 break 4024 } 4025 v.reset(OpMIPS64MOVBstore) 4026 v.AuxInt = off1 + off2 4027 v.Aux = sym 4028 v.AddArg(ptr) 4029 v.AddArg(val) 4030 v.AddArg(mem) 4031 return true 4032 } 4033 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4034 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4035 // result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4036 for { 4037 off1 := v.AuxInt 4038 sym1 := v.Aux 4039 _ = v.Args[2] 4040 v_0 := v.Args[0] 4041 if v_0.Op != OpMIPS64MOVVaddr { 4042 break 4043 } 4044 off2 := v_0.AuxInt 4045 sym2 := v_0.Aux 4046 ptr := v_0.Args[0] 4047 val := v.Args[1] 4048 mem := v.Args[2] 4049 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4050 break 4051 } 4052 v.reset(OpMIPS64MOVBstore) 4053 v.AuxInt = off1 + off2 4054 v.Aux = mergeSym(sym1, sym2) 4055 v.AddArg(ptr) 4056 v.AddArg(val) 4057 v.AddArg(mem) 4058 return true 4059 } 4060 // match: (MOVBstore [off] {sym} ptr (MOVVconst [0]) mem) 4061 // cond: 4062 // result: (MOVBstorezero [off] {sym} ptr mem) 4063 for { 4064 off := v.AuxInt 4065 sym := v.Aux 4066 _ = v.Args[2] 4067 ptr := v.Args[0] 4068 v_1 := v.Args[1] 4069 if v_1.Op != OpMIPS64MOVVconst { 4070 break 4071 } 4072 if v_1.AuxInt != 0 { 4073 break 4074 } 4075 mem := v.Args[2] 4076 v.reset(OpMIPS64MOVBstorezero) 4077 v.AuxInt = off 4078 v.Aux = sym 4079 v.AddArg(ptr) 4080 v.AddArg(mem) 4081 return true 4082 } 4083 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 4084 // cond: 4085 // result: (MOVBstore [off] {sym} ptr x mem) 4086 for { 4087 off := v.AuxInt 4088 sym := v.Aux 4089 _ = v.Args[2] 4090 ptr := v.Args[0] 4091 v_1 := v.Args[1] 4092 if v_1.Op != OpMIPS64MOVBreg { 4093 break 4094 } 4095 x := v_1.Args[0] 4096 mem := v.Args[2] 4097 v.reset(OpMIPS64MOVBstore) 4098 v.AuxInt = off 4099 v.Aux = sym 4100 v.AddArg(ptr) 4101 v.AddArg(x) 4102 v.AddArg(mem) 4103 return true 4104 } 4105 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 4106 // cond: 4107 // result: (MOVBstore [off] {sym} ptr x mem) 4108 for { 4109 off := v.AuxInt 4110 sym := v.Aux 4111 _ = v.Args[2] 4112 ptr := v.Args[0] 4113 v_1 := v.Args[1] 4114 if v_1.Op != OpMIPS64MOVBUreg { 4115 break 4116 } 4117 x := v_1.Args[0] 4118 mem := v.Args[2] 4119 v.reset(OpMIPS64MOVBstore) 4120 v.AuxInt = off 4121 v.Aux = sym 4122 v.AddArg(ptr) 4123 v.AddArg(x) 4124 v.AddArg(mem) 4125 return true 4126 } 4127 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 4128 // cond: 4129 // result: (MOVBstore [off] {sym} ptr x mem) 4130 for { 4131 off := v.AuxInt 4132 sym := v.Aux 4133 _ = v.Args[2] 4134 ptr := v.Args[0] 4135 v_1 := v.Args[1] 4136 if v_1.Op != OpMIPS64MOVHreg { 4137 break 4138 } 4139 x := v_1.Args[0] 4140 mem := v.Args[2] 4141 v.reset(OpMIPS64MOVBstore) 4142 v.AuxInt = off 4143 v.Aux = sym 4144 v.AddArg(ptr) 4145 v.AddArg(x) 4146 v.AddArg(mem) 4147 return true 4148 } 4149 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 4150 // cond: 4151 // result: (MOVBstore [off] {sym} ptr x mem) 4152 for { 4153 off := v.AuxInt 4154 sym := v.Aux 4155 _ = v.Args[2] 4156 ptr := v.Args[0] 4157 v_1 := v.Args[1] 4158 if v_1.Op != OpMIPS64MOVHUreg { 4159 break 4160 } 4161 x := v_1.Args[0] 4162 mem := v.Args[2] 4163 v.reset(OpMIPS64MOVBstore) 4164 v.AuxInt = off 4165 v.Aux = sym 4166 v.AddArg(ptr) 4167 v.AddArg(x) 4168 v.AddArg(mem) 4169 return true 4170 } 4171 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 4172 // cond: 4173 // result: (MOVBstore [off] {sym} ptr x mem) 4174 for { 4175 off := v.AuxInt 4176 sym := v.Aux 4177 _ = v.Args[2] 4178 ptr := v.Args[0] 4179 v_1 := v.Args[1] 4180 if v_1.Op != OpMIPS64MOVWreg { 4181 break 4182 } 4183 x := v_1.Args[0] 4184 mem := v.Args[2] 4185 v.reset(OpMIPS64MOVBstore) 4186 v.AuxInt = off 4187 v.Aux = sym 4188 v.AddArg(ptr) 4189 v.AddArg(x) 4190 v.AddArg(mem) 4191 return true 4192 } 4193 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 4194 // cond: 4195 // result: (MOVBstore [off] {sym} ptr x mem) 4196 for { 4197 off := v.AuxInt 4198 sym := v.Aux 4199 _ = v.Args[2] 4200 ptr := v.Args[0] 4201 v_1 := v.Args[1] 4202 if v_1.Op != OpMIPS64MOVWUreg { 4203 break 4204 } 4205 x := v_1.Args[0] 4206 mem := v.Args[2] 4207 v.reset(OpMIPS64MOVBstore) 4208 v.AuxInt = off 4209 v.Aux = sym 4210 v.AddArg(ptr) 4211 v.AddArg(x) 4212 v.AddArg(mem) 4213 return true 4214 } 4215 return false 4216 } 4217 func rewriteValueMIPS64_OpMIPS64MOVBstorezero_0(v *Value) bool { 4218 // match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4219 // cond: is32Bit(off1+off2) 4220 // result: (MOVBstorezero [off1+off2] {sym} ptr mem) 4221 for { 4222 off1 := v.AuxInt 4223 sym := v.Aux 4224 _ = v.Args[1] 4225 v_0 := v.Args[0] 4226 if v_0.Op != OpMIPS64ADDVconst { 4227 break 4228 } 4229 off2 := v_0.AuxInt 4230 ptr := v_0.Args[0] 4231 mem := v.Args[1] 4232 if !(is32Bit(off1 + off2)) { 4233 break 4234 } 4235 v.reset(OpMIPS64MOVBstorezero) 4236 v.AuxInt = off1 + off2 4237 v.Aux = sym 4238 v.AddArg(ptr) 4239 v.AddArg(mem) 4240 return true 4241 } 4242 // match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4243 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4244 // result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4245 for { 4246 off1 := v.AuxInt 4247 sym1 := v.Aux 4248 _ = v.Args[1] 4249 v_0 := v.Args[0] 4250 if v_0.Op != OpMIPS64MOVVaddr { 4251 break 4252 } 4253 off2 := v_0.AuxInt 4254 sym2 := v_0.Aux 4255 ptr := v_0.Args[0] 4256 mem := v.Args[1] 4257 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4258 break 4259 } 4260 v.reset(OpMIPS64MOVBstorezero) 4261 v.AuxInt = off1 + off2 4262 v.Aux = mergeSym(sym1, sym2) 4263 v.AddArg(ptr) 4264 v.AddArg(mem) 4265 return true 4266 } 4267 return false 4268 } 4269 func rewriteValueMIPS64_OpMIPS64MOVDload_0(v *Value) bool { 4270 // match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem) 4271 // cond: is32Bit(off1+off2) 4272 // result: (MOVDload [off1+off2] {sym} ptr mem) 4273 for { 4274 off1 := v.AuxInt 4275 sym := v.Aux 4276 _ = v.Args[1] 4277 v_0 := v.Args[0] 4278 if v_0.Op != OpMIPS64ADDVconst { 4279 break 4280 } 4281 off2 := v_0.AuxInt 4282 ptr := v_0.Args[0] 4283 mem := v.Args[1] 4284 if !(is32Bit(off1 + off2)) { 4285 break 4286 } 4287 v.reset(OpMIPS64MOVDload) 4288 v.AuxInt = off1 + off2 4289 v.Aux = sym 4290 v.AddArg(ptr) 4291 v.AddArg(mem) 4292 return true 4293 } 4294 // match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4295 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4296 // result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4297 for { 4298 off1 := v.AuxInt 4299 sym1 := v.Aux 4300 _ = v.Args[1] 4301 v_0 := v.Args[0] 4302 if v_0.Op != OpMIPS64MOVVaddr { 4303 break 4304 } 4305 off2 := v_0.AuxInt 4306 sym2 := v_0.Aux 4307 ptr := v_0.Args[0] 4308 mem := v.Args[1] 4309 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4310 break 4311 } 4312 v.reset(OpMIPS64MOVDload) 4313 v.AuxInt = off1 + off2 4314 v.Aux = mergeSym(sym1, sym2) 4315 v.AddArg(ptr) 4316 v.AddArg(mem) 4317 return true 4318 } 4319 return false 4320 } 4321 func rewriteValueMIPS64_OpMIPS64MOVDstore_0(v *Value) bool { 4322 // match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4323 // cond: is32Bit(off1+off2) 4324 // result: (MOVDstore [off1+off2] {sym} ptr val mem) 4325 for { 4326 off1 := v.AuxInt 4327 sym := v.Aux 4328 _ = v.Args[2] 4329 v_0 := v.Args[0] 4330 if v_0.Op != OpMIPS64ADDVconst { 4331 break 4332 } 4333 off2 := v_0.AuxInt 4334 ptr := v_0.Args[0] 4335 val := v.Args[1] 4336 mem := v.Args[2] 4337 if !(is32Bit(off1 + off2)) { 4338 break 4339 } 4340 v.reset(OpMIPS64MOVDstore) 4341 v.AuxInt = off1 + off2 4342 v.Aux = sym 4343 v.AddArg(ptr) 4344 v.AddArg(val) 4345 v.AddArg(mem) 4346 return true 4347 } 4348 // match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4349 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4350 // result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4351 for { 4352 off1 := v.AuxInt 4353 sym1 := v.Aux 4354 _ = v.Args[2] 4355 v_0 := v.Args[0] 4356 if v_0.Op != OpMIPS64MOVVaddr { 4357 break 4358 } 4359 off2 := v_0.AuxInt 4360 sym2 := v_0.Aux 4361 ptr := v_0.Args[0] 4362 val := v.Args[1] 4363 mem := v.Args[2] 4364 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4365 break 4366 } 4367 v.reset(OpMIPS64MOVDstore) 4368 v.AuxInt = off1 + off2 4369 v.Aux = mergeSym(sym1, sym2) 4370 v.AddArg(ptr) 4371 v.AddArg(val) 4372 v.AddArg(mem) 4373 return true 4374 } 4375 return false 4376 } 4377 func rewriteValueMIPS64_OpMIPS64MOVFload_0(v *Value) bool { 4378 // match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem) 4379 // cond: is32Bit(off1+off2) 4380 // result: (MOVFload [off1+off2] {sym} ptr mem) 4381 for { 4382 off1 := v.AuxInt 4383 sym := v.Aux 4384 _ = v.Args[1] 4385 v_0 := v.Args[0] 4386 if v_0.Op != OpMIPS64ADDVconst { 4387 break 4388 } 4389 off2 := v_0.AuxInt 4390 ptr := v_0.Args[0] 4391 mem := v.Args[1] 4392 if !(is32Bit(off1 + off2)) { 4393 break 4394 } 4395 v.reset(OpMIPS64MOVFload) 4396 v.AuxInt = off1 + off2 4397 v.Aux = sym 4398 v.AddArg(ptr) 4399 v.AddArg(mem) 4400 return true 4401 } 4402 // match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4403 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4404 // result: (MOVFload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4405 for { 4406 off1 := v.AuxInt 4407 sym1 := v.Aux 4408 _ = v.Args[1] 4409 v_0 := v.Args[0] 4410 if v_0.Op != OpMIPS64MOVVaddr { 4411 break 4412 } 4413 off2 := v_0.AuxInt 4414 sym2 := v_0.Aux 4415 ptr := v_0.Args[0] 4416 mem := v.Args[1] 4417 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4418 break 4419 } 4420 v.reset(OpMIPS64MOVFload) 4421 v.AuxInt = off1 + off2 4422 v.Aux = mergeSym(sym1, sym2) 4423 v.AddArg(ptr) 4424 v.AddArg(mem) 4425 return true 4426 } 4427 return false 4428 } 4429 func rewriteValueMIPS64_OpMIPS64MOVFstore_0(v *Value) bool { 4430 // match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4431 // cond: is32Bit(off1+off2) 4432 // result: (MOVFstore [off1+off2] {sym} ptr val mem) 4433 for { 4434 off1 := v.AuxInt 4435 sym := v.Aux 4436 _ = v.Args[2] 4437 v_0 := v.Args[0] 4438 if v_0.Op != OpMIPS64ADDVconst { 4439 break 4440 } 4441 off2 := v_0.AuxInt 4442 ptr := v_0.Args[0] 4443 val := v.Args[1] 4444 mem := v.Args[2] 4445 if !(is32Bit(off1 + off2)) { 4446 break 4447 } 4448 v.reset(OpMIPS64MOVFstore) 4449 v.AuxInt = off1 + off2 4450 v.Aux = sym 4451 v.AddArg(ptr) 4452 v.AddArg(val) 4453 v.AddArg(mem) 4454 return true 4455 } 4456 // match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4457 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4458 // result: (MOVFstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4459 for { 4460 off1 := v.AuxInt 4461 sym1 := v.Aux 4462 _ = v.Args[2] 4463 v_0 := v.Args[0] 4464 if v_0.Op != OpMIPS64MOVVaddr { 4465 break 4466 } 4467 off2 := v_0.AuxInt 4468 sym2 := v_0.Aux 4469 ptr := v_0.Args[0] 4470 val := v.Args[1] 4471 mem := v.Args[2] 4472 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4473 break 4474 } 4475 v.reset(OpMIPS64MOVFstore) 4476 v.AuxInt = off1 + off2 4477 v.Aux = mergeSym(sym1, sym2) 4478 v.AddArg(ptr) 4479 v.AddArg(val) 4480 v.AddArg(mem) 4481 return true 4482 } 4483 return false 4484 } 4485 func rewriteValueMIPS64_OpMIPS64MOVHUload_0(v *Value) bool { 4486 // match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem) 4487 // cond: is32Bit(off1+off2) 4488 // result: (MOVHUload [off1+off2] {sym} ptr mem) 4489 for { 4490 off1 := v.AuxInt 4491 sym := v.Aux 4492 _ = v.Args[1] 4493 v_0 := v.Args[0] 4494 if v_0.Op != OpMIPS64ADDVconst { 4495 break 4496 } 4497 off2 := v_0.AuxInt 4498 ptr := v_0.Args[0] 4499 mem := v.Args[1] 4500 if !(is32Bit(off1 + off2)) { 4501 break 4502 } 4503 v.reset(OpMIPS64MOVHUload) 4504 v.AuxInt = off1 + off2 4505 v.Aux = sym 4506 v.AddArg(ptr) 4507 v.AddArg(mem) 4508 return true 4509 } 4510 // match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4511 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4512 // result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4513 for { 4514 off1 := v.AuxInt 4515 sym1 := v.Aux 4516 _ = v.Args[1] 4517 v_0 := v.Args[0] 4518 if v_0.Op != OpMIPS64MOVVaddr { 4519 break 4520 } 4521 off2 := v_0.AuxInt 4522 sym2 := v_0.Aux 4523 ptr := v_0.Args[0] 4524 mem := v.Args[1] 4525 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4526 break 4527 } 4528 v.reset(OpMIPS64MOVHUload) 4529 v.AuxInt = off1 + off2 4530 v.Aux = mergeSym(sym1, sym2) 4531 v.AddArg(ptr) 4532 v.AddArg(mem) 4533 return true 4534 } 4535 return false 4536 } 4537 func rewriteValueMIPS64_OpMIPS64MOVHUreg_0(v *Value) bool { 4538 // match: (MOVHUreg x:(MOVBUload _ _)) 4539 // cond: 4540 // result: (MOVVreg x) 4541 for { 4542 x := v.Args[0] 4543 if x.Op != OpMIPS64MOVBUload { 4544 break 4545 } 4546 _ = x.Args[1] 4547 v.reset(OpMIPS64MOVVreg) 4548 v.AddArg(x) 4549 return true 4550 } 4551 // match: (MOVHUreg x:(MOVHUload _ _)) 4552 // cond: 4553 // result: (MOVVreg x) 4554 for { 4555 x := v.Args[0] 4556 if x.Op != OpMIPS64MOVHUload { 4557 break 4558 } 4559 _ = x.Args[1] 4560 v.reset(OpMIPS64MOVVreg) 4561 v.AddArg(x) 4562 return true 4563 } 4564 // match: (MOVHUreg x:(MOVBUreg _)) 4565 // cond: 4566 // result: (MOVVreg x) 4567 for { 4568 x := v.Args[0] 4569 if x.Op != OpMIPS64MOVBUreg { 4570 break 4571 } 4572 v.reset(OpMIPS64MOVVreg) 4573 v.AddArg(x) 4574 return true 4575 } 4576 // match: (MOVHUreg x:(MOVHUreg _)) 4577 // cond: 4578 // result: (MOVVreg x) 4579 for { 4580 x := v.Args[0] 4581 if x.Op != OpMIPS64MOVHUreg { 4582 break 4583 } 4584 v.reset(OpMIPS64MOVVreg) 4585 v.AddArg(x) 4586 return true 4587 } 4588 // match: (MOVHUreg (MOVVconst [c])) 4589 // cond: 4590 // result: (MOVVconst [int64(uint16(c))]) 4591 for { 4592 v_0 := v.Args[0] 4593 if v_0.Op != OpMIPS64MOVVconst { 4594 break 4595 } 4596 c := v_0.AuxInt 4597 v.reset(OpMIPS64MOVVconst) 4598 v.AuxInt = int64(uint16(c)) 4599 return true 4600 } 4601 return false 4602 } 4603 func rewriteValueMIPS64_OpMIPS64MOVHload_0(v *Value) bool { 4604 // match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem) 4605 // cond: is32Bit(off1+off2) 4606 // result: (MOVHload [off1+off2] {sym} ptr mem) 4607 for { 4608 off1 := v.AuxInt 4609 sym := v.Aux 4610 _ = v.Args[1] 4611 v_0 := v.Args[0] 4612 if v_0.Op != OpMIPS64ADDVconst { 4613 break 4614 } 4615 off2 := v_0.AuxInt 4616 ptr := v_0.Args[0] 4617 mem := v.Args[1] 4618 if !(is32Bit(off1 + off2)) { 4619 break 4620 } 4621 v.reset(OpMIPS64MOVHload) 4622 v.AuxInt = off1 + off2 4623 v.Aux = sym 4624 v.AddArg(ptr) 4625 v.AddArg(mem) 4626 return true 4627 } 4628 // match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4629 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4630 // result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4631 for { 4632 off1 := v.AuxInt 4633 sym1 := v.Aux 4634 _ = v.Args[1] 4635 v_0 := v.Args[0] 4636 if v_0.Op != OpMIPS64MOVVaddr { 4637 break 4638 } 4639 off2 := v_0.AuxInt 4640 sym2 := v_0.Aux 4641 ptr := v_0.Args[0] 4642 mem := v.Args[1] 4643 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4644 break 4645 } 4646 v.reset(OpMIPS64MOVHload) 4647 v.AuxInt = off1 + off2 4648 v.Aux = mergeSym(sym1, sym2) 4649 v.AddArg(ptr) 4650 v.AddArg(mem) 4651 return true 4652 } 4653 return false 4654 } 4655 func rewriteValueMIPS64_OpMIPS64MOVHreg_0(v *Value) bool { 4656 // match: (MOVHreg x:(MOVBload _ _)) 4657 // cond: 4658 // result: (MOVVreg x) 4659 for { 4660 x := v.Args[0] 4661 if x.Op != OpMIPS64MOVBload { 4662 break 4663 } 4664 _ = x.Args[1] 4665 v.reset(OpMIPS64MOVVreg) 4666 v.AddArg(x) 4667 return true 4668 } 4669 // match: (MOVHreg x:(MOVBUload _ _)) 4670 // cond: 4671 // result: (MOVVreg x) 4672 for { 4673 x := v.Args[0] 4674 if x.Op != OpMIPS64MOVBUload { 4675 break 4676 } 4677 _ = x.Args[1] 4678 v.reset(OpMIPS64MOVVreg) 4679 v.AddArg(x) 4680 return true 4681 } 4682 // match: (MOVHreg x:(MOVHload _ _)) 4683 // cond: 4684 // result: (MOVVreg x) 4685 for { 4686 x := v.Args[0] 4687 if x.Op != OpMIPS64MOVHload { 4688 break 4689 } 4690 _ = x.Args[1] 4691 v.reset(OpMIPS64MOVVreg) 4692 v.AddArg(x) 4693 return true 4694 } 4695 // match: (MOVHreg x:(MOVBreg _)) 4696 // cond: 4697 // result: (MOVVreg x) 4698 for { 4699 x := v.Args[0] 4700 if x.Op != OpMIPS64MOVBreg { 4701 break 4702 } 4703 v.reset(OpMIPS64MOVVreg) 4704 v.AddArg(x) 4705 return true 4706 } 4707 // match: (MOVHreg x:(MOVBUreg _)) 4708 // cond: 4709 // result: (MOVVreg x) 4710 for { 4711 x := v.Args[0] 4712 if x.Op != OpMIPS64MOVBUreg { 4713 break 4714 } 4715 v.reset(OpMIPS64MOVVreg) 4716 v.AddArg(x) 4717 return true 4718 } 4719 // match: (MOVHreg x:(MOVHreg _)) 4720 // cond: 4721 // result: (MOVVreg x) 4722 for { 4723 x := v.Args[0] 4724 if x.Op != OpMIPS64MOVHreg { 4725 break 4726 } 4727 v.reset(OpMIPS64MOVVreg) 4728 v.AddArg(x) 4729 return true 4730 } 4731 // match: (MOVHreg (MOVVconst [c])) 4732 // cond: 4733 // result: (MOVVconst [int64(int16(c))]) 4734 for { 4735 v_0 := v.Args[0] 4736 if v_0.Op != OpMIPS64MOVVconst { 4737 break 4738 } 4739 c := v_0.AuxInt 4740 v.reset(OpMIPS64MOVVconst) 4741 v.AuxInt = int64(int16(c)) 4742 return true 4743 } 4744 return false 4745 } 4746 func rewriteValueMIPS64_OpMIPS64MOVHstore_0(v *Value) bool { 4747 // match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 4748 // cond: is32Bit(off1+off2) 4749 // result: (MOVHstore [off1+off2] {sym} ptr val mem) 4750 for { 4751 off1 := v.AuxInt 4752 sym := v.Aux 4753 _ = v.Args[2] 4754 v_0 := v.Args[0] 4755 if v_0.Op != OpMIPS64ADDVconst { 4756 break 4757 } 4758 off2 := v_0.AuxInt 4759 ptr := v_0.Args[0] 4760 val := v.Args[1] 4761 mem := v.Args[2] 4762 if !(is32Bit(off1 + off2)) { 4763 break 4764 } 4765 v.reset(OpMIPS64MOVHstore) 4766 v.AuxInt = off1 + off2 4767 v.Aux = sym 4768 v.AddArg(ptr) 4769 v.AddArg(val) 4770 v.AddArg(mem) 4771 return true 4772 } 4773 // match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 4774 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4775 // result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 4776 for { 4777 off1 := v.AuxInt 4778 sym1 := v.Aux 4779 _ = v.Args[2] 4780 v_0 := v.Args[0] 4781 if v_0.Op != OpMIPS64MOVVaddr { 4782 break 4783 } 4784 off2 := v_0.AuxInt 4785 sym2 := v_0.Aux 4786 ptr := v_0.Args[0] 4787 val := v.Args[1] 4788 mem := v.Args[2] 4789 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4790 break 4791 } 4792 v.reset(OpMIPS64MOVHstore) 4793 v.AuxInt = off1 + off2 4794 v.Aux = mergeSym(sym1, sym2) 4795 v.AddArg(ptr) 4796 v.AddArg(val) 4797 v.AddArg(mem) 4798 return true 4799 } 4800 // match: (MOVHstore [off] {sym} ptr (MOVVconst [0]) mem) 4801 // cond: 4802 // result: (MOVHstorezero [off] {sym} ptr mem) 4803 for { 4804 off := v.AuxInt 4805 sym := v.Aux 4806 _ = v.Args[2] 4807 ptr := v.Args[0] 4808 v_1 := v.Args[1] 4809 if v_1.Op != OpMIPS64MOVVconst { 4810 break 4811 } 4812 if v_1.AuxInt != 0 { 4813 break 4814 } 4815 mem := v.Args[2] 4816 v.reset(OpMIPS64MOVHstorezero) 4817 v.AuxInt = off 4818 v.Aux = sym 4819 v.AddArg(ptr) 4820 v.AddArg(mem) 4821 return true 4822 } 4823 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 4824 // cond: 4825 // result: (MOVHstore [off] {sym} ptr x mem) 4826 for { 4827 off := v.AuxInt 4828 sym := v.Aux 4829 _ = v.Args[2] 4830 ptr := v.Args[0] 4831 v_1 := v.Args[1] 4832 if v_1.Op != OpMIPS64MOVHreg { 4833 break 4834 } 4835 x := v_1.Args[0] 4836 mem := v.Args[2] 4837 v.reset(OpMIPS64MOVHstore) 4838 v.AuxInt = off 4839 v.Aux = sym 4840 v.AddArg(ptr) 4841 v.AddArg(x) 4842 v.AddArg(mem) 4843 return true 4844 } 4845 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 4846 // cond: 4847 // result: (MOVHstore [off] {sym} ptr x mem) 4848 for { 4849 off := v.AuxInt 4850 sym := v.Aux 4851 _ = v.Args[2] 4852 ptr := v.Args[0] 4853 v_1 := v.Args[1] 4854 if v_1.Op != OpMIPS64MOVHUreg { 4855 break 4856 } 4857 x := v_1.Args[0] 4858 mem := v.Args[2] 4859 v.reset(OpMIPS64MOVHstore) 4860 v.AuxInt = off 4861 v.Aux = sym 4862 v.AddArg(ptr) 4863 v.AddArg(x) 4864 v.AddArg(mem) 4865 return true 4866 } 4867 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 4868 // cond: 4869 // result: (MOVHstore [off] {sym} ptr x mem) 4870 for { 4871 off := v.AuxInt 4872 sym := v.Aux 4873 _ = v.Args[2] 4874 ptr := v.Args[0] 4875 v_1 := v.Args[1] 4876 if v_1.Op != OpMIPS64MOVWreg { 4877 break 4878 } 4879 x := v_1.Args[0] 4880 mem := v.Args[2] 4881 v.reset(OpMIPS64MOVHstore) 4882 v.AuxInt = off 4883 v.Aux = sym 4884 v.AddArg(ptr) 4885 v.AddArg(x) 4886 v.AddArg(mem) 4887 return true 4888 } 4889 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 4890 // cond: 4891 // result: (MOVHstore [off] {sym} ptr x mem) 4892 for { 4893 off := v.AuxInt 4894 sym := v.Aux 4895 _ = v.Args[2] 4896 ptr := v.Args[0] 4897 v_1 := v.Args[1] 4898 if v_1.Op != OpMIPS64MOVWUreg { 4899 break 4900 } 4901 x := v_1.Args[0] 4902 mem := v.Args[2] 4903 v.reset(OpMIPS64MOVHstore) 4904 v.AuxInt = off 4905 v.Aux = sym 4906 v.AddArg(ptr) 4907 v.AddArg(x) 4908 v.AddArg(mem) 4909 return true 4910 } 4911 return false 4912 } 4913 func rewriteValueMIPS64_OpMIPS64MOVHstorezero_0(v *Value) bool { 4914 // match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 4915 // cond: is32Bit(off1+off2) 4916 // result: (MOVHstorezero [off1+off2] {sym} ptr mem) 4917 for { 4918 off1 := v.AuxInt 4919 sym := v.Aux 4920 _ = v.Args[1] 4921 v_0 := v.Args[0] 4922 if v_0.Op != OpMIPS64ADDVconst { 4923 break 4924 } 4925 off2 := v_0.AuxInt 4926 ptr := v_0.Args[0] 4927 mem := v.Args[1] 4928 if !(is32Bit(off1 + off2)) { 4929 break 4930 } 4931 v.reset(OpMIPS64MOVHstorezero) 4932 v.AuxInt = off1 + off2 4933 v.Aux = sym 4934 v.AddArg(ptr) 4935 v.AddArg(mem) 4936 return true 4937 } 4938 // match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4939 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4940 // result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4941 for { 4942 off1 := v.AuxInt 4943 sym1 := v.Aux 4944 _ = v.Args[1] 4945 v_0 := v.Args[0] 4946 if v_0.Op != OpMIPS64MOVVaddr { 4947 break 4948 } 4949 off2 := v_0.AuxInt 4950 sym2 := v_0.Aux 4951 ptr := v_0.Args[0] 4952 mem := v.Args[1] 4953 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 4954 break 4955 } 4956 v.reset(OpMIPS64MOVHstorezero) 4957 v.AuxInt = off1 + off2 4958 v.Aux = mergeSym(sym1, sym2) 4959 v.AddArg(ptr) 4960 v.AddArg(mem) 4961 return true 4962 } 4963 return false 4964 } 4965 func rewriteValueMIPS64_OpMIPS64MOVVload_0(v *Value) bool { 4966 // match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem) 4967 // cond: is32Bit(off1+off2) 4968 // result: (MOVVload [off1+off2] {sym} ptr mem) 4969 for { 4970 off1 := v.AuxInt 4971 sym := v.Aux 4972 _ = v.Args[1] 4973 v_0 := v.Args[0] 4974 if v_0.Op != OpMIPS64ADDVconst { 4975 break 4976 } 4977 off2 := v_0.AuxInt 4978 ptr := v_0.Args[0] 4979 mem := v.Args[1] 4980 if !(is32Bit(off1 + off2)) { 4981 break 4982 } 4983 v.reset(OpMIPS64MOVVload) 4984 v.AuxInt = off1 + off2 4985 v.Aux = sym 4986 v.AddArg(ptr) 4987 v.AddArg(mem) 4988 return true 4989 } 4990 // match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 4991 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 4992 // result: (MOVVload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 4993 for { 4994 off1 := v.AuxInt 4995 sym1 := v.Aux 4996 _ = v.Args[1] 4997 v_0 := v.Args[0] 4998 if v_0.Op != OpMIPS64MOVVaddr { 4999 break 5000 } 5001 off2 := v_0.AuxInt 5002 sym2 := v_0.Aux 5003 ptr := v_0.Args[0] 5004 mem := v.Args[1] 5005 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5006 break 5007 } 5008 v.reset(OpMIPS64MOVVload) 5009 v.AuxInt = off1 + off2 5010 v.Aux = mergeSym(sym1, sym2) 5011 v.AddArg(ptr) 5012 v.AddArg(mem) 5013 return true 5014 } 5015 return false 5016 } 5017 func rewriteValueMIPS64_OpMIPS64MOVVreg_0(v *Value) bool { 5018 // match: (MOVVreg x) 5019 // cond: x.Uses == 1 5020 // result: (MOVVnop x) 5021 for { 5022 x := v.Args[0] 5023 if !(x.Uses == 1) { 5024 break 5025 } 5026 v.reset(OpMIPS64MOVVnop) 5027 v.AddArg(x) 5028 return true 5029 } 5030 // match: (MOVVreg (MOVVconst [c])) 5031 // cond: 5032 // result: (MOVVconst [c]) 5033 for { 5034 v_0 := v.Args[0] 5035 if v_0.Op != OpMIPS64MOVVconst { 5036 break 5037 } 5038 c := v_0.AuxInt 5039 v.reset(OpMIPS64MOVVconst) 5040 v.AuxInt = c 5041 return true 5042 } 5043 return false 5044 } 5045 func rewriteValueMIPS64_OpMIPS64MOVVstore_0(v *Value) bool { 5046 // match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 5047 // cond: is32Bit(off1+off2) 5048 // result: (MOVVstore [off1+off2] {sym} ptr val mem) 5049 for { 5050 off1 := v.AuxInt 5051 sym := v.Aux 5052 _ = v.Args[2] 5053 v_0 := v.Args[0] 5054 if v_0.Op != OpMIPS64ADDVconst { 5055 break 5056 } 5057 off2 := v_0.AuxInt 5058 ptr := v_0.Args[0] 5059 val := v.Args[1] 5060 mem := v.Args[2] 5061 if !(is32Bit(off1 + off2)) { 5062 break 5063 } 5064 v.reset(OpMIPS64MOVVstore) 5065 v.AuxInt = off1 + off2 5066 v.Aux = sym 5067 v.AddArg(ptr) 5068 v.AddArg(val) 5069 v.AddArg(mem) 5070 return true 5071 } 5072 // match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 5073 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5074 // result: (MOVVstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5075 for { 5076 off1 := v.AuxInt 5077 sym1 := v.Aux 5078 _ = v.Args[2] 5079 v_0 := v.Args[0] 5080 if v_0.Op != OpMIPS64MOVVaddr { 5081 break 5082 } 5083 off2 := v_0.AuxInt 5084 sym2 := v_0.Aux 5085 ptr := v_0.Args[0] 5086 val := v.Args[1] 5087 mem := v.Args[2] 5088 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5089 break 5090 } 5091 v.reset(OpMIPS64MOVVstore) 5092 v.AuxInt = off1 + off2 5093 v.Aux = mergeSym(sym1, sym2) 5094 v.AddArg(ptr) 5095 v.AddArg(val) 5096 v.AddArg(mem) 5097 return true 5098 } 5099 // match: (MOVVstore [off] {sym} ptr (MOVVconst [0]) mem) 5100 // cond: 5101 // result: (MOVVstorezero [off] {sym} ptr mem) 5102 for { 5103 off := v.AuxInt 5104 sym := v.Aux 5105 _ = v.Args[2] 5106 ptr := v.Args[0] 5107 v_1 := v.Args[1] 5108 if v_1.Op != OpMIPS64MOVVconst { 5109 break 5110 } 5111 if v_1.AuxInt != 0 { 5112 break 5113 } 5114 mem := v.Args[2] 5115 v.reset(OpMIPS64MOVVstorezero) 5116 v.AuxInt = off 5117 v.Aux = sym 5118 v.AddArg(ptr) 5119 v.AddArg(mem) 5120 return true 5121 } 5122 return false 5123 } 5124 func rewriteValueMIPS64_OpMIPS64MOVVstorezero_0(v *Value) bool { 5125 // match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 5126 // cond: is32Bit(off1+off2) 5127 // result: (MOVVstorezero [off1+off2] {sym} ptr mem) 5128 for { 5129 off1 := v.AuxInt 5130 sym := v.Aux 5131 _ = v.Args[1] 5132 v_0 := v.Args[0] 5133 if v_0.Op != OpMIPS64ADDVconst { 5134 break 5135 } 5136 off2 := v_0.AuxInt 5137 ptr := v_0.Args[0] 5138 mem := v.Args[1] 5139 if !(is32Bit(off1 + off2)) { 5140 break 5141 } 5142 v.reset(OpMIPS64MOVVstorezero) 5143 v.AuxInt = off1 + off2 5144 v.Aux = sym 5145 v.AddArg(ptr) 5146 v.AddArg(mem) 5147 return true 5148 } 5149 // match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5150 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5151 // result: (MOVVstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5152 for { 5153 off1 := v.AuxInt 5154 sym1 := v.Aux 5155 _ = v.Args[1] 5156 v_0 := v.Args[0] 5157 if v_0.Op != OpMIPS64MOVVaddr { 5158 break 5159 } 5160 off2 := v_0.AuxInt 5161 sym2 := v_0.Aux 5162 ptr := v_0.Args[0] 5163 mem := v.Args[1] 5164 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5165 break 5166 } 5167 v.reset(OpMIPS64MOVVstorezero) 5168 v.AuxInt = off1 + off2 5169 v.Aux = mergeSym(sym1, sym2) 5170 v.AddArg(ptr) 5171 v.AddArg(mem) 5172 return true 5173 } 5174 return false 5175 } 5176 func rewriteValueMIPS64_OpMIPS64MOVWUload_0(v *Value) bool { 5177 // match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem) 5178 // cond: is32Bit(off1+off2) 5179 // result: (MOVWUload [off1+off2] {sym} ptr mem) 5180 for { 5181 off1 := v.AuxInt 5182 sym := v.Aux 5183 _ = v.Args[1] 5184 v_0 := v.Args[0] 5185 if v_0.Op != OpMIPS64ADDVconst { 5186 break 5187 } 5188 off2 := v_0.AuxInt 5189 ptr := v_0.Args[0] 5190 mem := v.Args[1] 5191 if !(is32Bit(off1 + off2)) { 5192 break 5193 } 5194 v.reset(OpMIPS64MOVWUload) 5195 v.AuxInt = off1 + off2 5196 v.Aux = sym 5197 v.AddArg(ptr) 5198 v.AddArg(mem) 5199 return true 5200 } 5201 // match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5202 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5203 // result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5204 for { 5205 off1 := v.AuxInt 5206 sym1 := v.Aux 5207 _ = v.Args[1] 5208 v_0 := v.Args[0] 5209 if v_0.Op != OpMIPS64MOVVaddr { 5210 break 5211 } 5212 off2 := v_0.AuxInt 5213 sym2 := v_0.Aux 5214 ptr := v_0.Args[0] 5215 mem := v.Args[1] 5216 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5217 break 5218 } 5219 v.reset(OpMIPS64MOVWUload) 5220 v.AuxInt = off1 + off2 5221 v.Aux = mergeSym(sym1, sym2) 5222 v.AddArg(ptr) 5223 v.AddArg(mem) 5224 return true 5225 } 5226 return false 5227 } 5228 func rewriteValueMIPS64_OpMIPS64MOVWUreg_0(v *Value) bool { 5229 // match: (MOVWUreg x:(MOVBUload _ _)) 5230 // cond: 5231 // result: (MOVVreg x) 5232 for { 5233 x := v.Args[0] 5234 if x.Op != OpMIPS64MOVBUload { 5235 break 5236 } 5237 _ = x.Args[1] 5238 v.reset(OpMIPS64MOVVreg) 5239 v.AddArg(x) 5240 return true 5241 } 5242 // match: (MOVWUreg x:(MOVHUload _ _)) 5243 // cond: 5244 // result: (MOVVreg x) 5245 for { 5246 x := v.Args[0] 5247 if x.Op != OpMIPS64MOVHUload { 5248 break 5249 } 5250 _ = x.Args[1] 5251 v.reset(OpMIPS64MOVVreg) 5252 v.AddArg(x) 5253 return true 5254 } 5255 // match: (MOVWUreg x:(MOVWUload _ _)) 5256 // cond: 5257 // result: (MOVVreg x) 5258 for { 5259 x := v.Args[0] 5260 if x.Op != OpMIPS64MOVWUload { 5261 break 5262 } 5263 _ = x.Args[1] 5264 v.reset(OpMIPS64MOVVreg) 5265 v.AddArg(x) 5266 return true 5267 } 5268 // match: (MOVWUreg x:(MOVBUreg _)) 5269 // cond: 5270 // result: (MOVVreg x) 5271 for { 5272 x := v.Args[0] 5273 if x.Op != OpMIPS64MOVBUreg { 5274 break 5275 } 5276 v.reset(OpMIPS64MOVVreg) 5277 v.AddArg(x) 5278 return true 5279 } 5280 // match: (MOVWUreg x:(MOVHUreg _)) 5281 // cond: 5282 // result: (MOVVreg x) 5283 for { 5284 x := v.Args[0] 5285 if x.Op != OpMIPS64MOVHUreg { 5286 break 5287 } 5288 v.reset(OpMIPS64MOVVreg) 5289 v.AddArg(x) 5290 return true 5291 } 5292 // match: (MOVWUreg x:(MOVWUreg _)) 5293 // cond: 5294 // result: (MOVVreg x) 5295 for { 5296 x := v.Args[0] 5297 if x.Op != OpMIPS64MOVWUreg { 5298 break 5299 } 5300 v.reset(OpMIPS64MOVVreg) 5301 v.AddArg(x) 5302 return true 5303 } 5304 // match: (MOVWUreg (MOVVconst [c])) 5305 // cond: 5306 // result: (MOVVconst [int64(uint32(c))]) 5307 for { 5308 v_0 := v.Args[0] 5309 if v_0.Op != OpMIPS64MOVVconst { 5310 break 5311 } 5312 c := v_0.AuxInt 5313 v.reset(OpMIPS64MOVVconst) 5314 v.AuxInt = int64(uint32(c)) 5315 return true 5316 } 5317 return false 5318 } 5319 func rewriteValueMIPS64_OpMIPS64MOVWload_0(v *Value) bool { 5320 // match: (MOVWload [off1] {sym} (ADDVconst [off2] ptr) mem) 5321 // cond: is32Bit(off1+off2) 5322 // result: (MOVWload [off1+off2] {sym} ptr mem) 5323 for { 5324 off1 := v.AuxInt 5325 sym := v.Aux 5326 _ = v.Args[1] 5327 v_0 := v.Args[0] 5328 if v_0.Op != OpMIPS64ADDVconst { 5329 break 5330 } 5331 off2 := v_0.AuxInt 5332 ptr := v_0.Args[0] 5333 mem := v.Args[1] 5334 if !(is32Bit(off1 + off2)) { 5335 break 5336 } 5337 v.reset(OpMIPS64MOVWload) 5338 v.AuxInt = off1 + off2 5339 v.Aux = sym 5340 v.AddArg(ptr) 5341 v.AddArg(mem) 5342 return true 5343 } 5344 // match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5345 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5346 // result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5347 for { 5348 off1 := v.AuxInt 5349 sym1 := v.Aux 5350 _ = v.Args[1] 5351 v_0 := v.Args[0] 5352 if v_0.Op != OpMIPS64MOVVaddr { 5353 break 5354 } 5355 off2 := v_0.AuxInt 5356 sym2 := v_0.Aux 5357 ptr := v_0.Args[0] 5358 mem := v.Args[1] 5359 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5360 break 5361 } 5362 v.reset(OpMIPS64MOVWload) 5363 v.AuxInt = off1 + off2 5364 v.Aux = mergeSym(sym1, sym2) 5365 v.AddArg(ptr) 5366 v.AddArg(mem) 5367 return true 5368 } 5369 return false 5370 } 5371 func rewriteValueMIPS64_OpMIPS64MOVWreg_0(v *Value) bool { 5372 // match: (MOVWreg x:(MOVBload _ _)) 5373 // cond: 5374 // result: (MOVVreg x) 5375 for { 5376 x := v.Args[0] 5377 if x.Op != OpMIPS64MOVBload { 5378 break 5379 } 5380 _ = x.Args[1] 5381 v.reset(OpMIPS64MOVVreg) 5382 v.AddArg(x) 5383 return true 5384 } 5385 // match: (MOVWreg x:(MOVBUload _ _)) 5386 // cond: 5387 // result: (MOVVreg x) 5388 for { 5389 x := v.Args[0] 5390 if x.Op != OpMIPS64MOVBUload { 5391 break 5392 } 5393 _ = x.Args[1] 5394 v.reset(OpMIPS64MOVVreg) 5395 v.AddArg(x) 5396 return true 5397 } 5398 // match: (MOVWreg x:(MOVHload _ _)) 5399 // cond: 5400 // result: (MOVVreg x) 5401 for { 5402 x := v.Args[0] 5403 if x.Op != OpMIPS64MOVHload { 5404 break 5405 } 5406 _ = x.Args[1] 5407 v.reset(OpMIPS64MOVVreg) 5408 v.AddArg(x) 5409 return true 5410 } 5411 // match: (MOVWreg x:(MOVHUload _ _)) 5412 // cond: 5413 // result: (MOVVreg x) 5414 for { 5415 x := v.Args[0] 5416 if x.Op != OpMIPS64MOVHUload { 5417 break 5418 } 5419 _ = x.Args[1] 5420 v.reset(OpMIPS64MOVVreg) 5421 v.AddArg(x) 5422 return true 5423 } 5424 // match: (MOVWreg x:(MOVWload _ _)) 5425 // cond: 5426 // result: (MOVVreg x) 5427 for { 5428 x := v.Args[0] 5429 if x.Op != OpMIPS64MOVWload { 5430 break 5431 } 5432 _ = x.Args[1] 5433 v.reset(OpMIPS64MOVVreg) 5434 v.AddArg(x) 5435 return true 5436 } 5437 // match: (MOVWreg x:(MOVBreg _)) 5438 // cond: 5439 // result: (MOVVreg x) 5440 for { 5441 x := v.Args[0] 5442 if x.Op != OpMIPS64MOVBreg { 5443 break 5444 } 5445 v.reset(OpMIPS64MOVVreg) 5446 v.AddArg(x) 5447 return true 5448 } 5449 // match: (MOVWreg x:(MOVBUreg _)) 5450 // cond: 5451 // result: (MOVVreg x) 5452 for { 5453 x := v.Args[0] 5454 if x.Op != OpMIPS64MOVBUreg { 5455 break 5456 } 5457 v.reset(OpMIPS64MOVVreg) 5458 v.AddArg(x) 5459 return true 5460 } 5461 // match: (MOVWreg x:(MOVHreg _)) 5462 // cond: 5463 // result: (MOVVreg x) 5464 for { 5465 x := v.Args[0] 5466 if x.Op != OpMIPS64MOVHreg { 5467 break 5468 } 5469 v.reset(OpMIPS64MOVVreg) 5470 v.AddArg(x) 5471 return true 5472 } 5473 // match: (MOVWreg x:(MOVHreg _)) 5474 // cond: 5475 // result: (MOVVreg x) 5476 for { 5477 x := v.Args[0] 5478 if x.Op != OpMIPS64MOVHreg { 5479 break 5480 } 5481 v.reset(OpMIPS64MOVVreg) 5482 v.AddArg(x) 5483 return true 5484 } 5485 // match: (MOVWreg x:(MOVWreg _)) 5486 // cond: 5487 // result: (MOVVreg x) 5488 for { 5489 x := v.Args[0] 5490 if x.Op != OpMIPS64MOVWreg { 5491 break 5492 } 5493 v.reset(OpMIPS64MOVVreg) 5494 v.AddArg(x) 5495 return true 5496 } 5497 return false 5498 } 5499 func rewriteValueMIPS64_OpMIPS64MOVWreg_10(v *Value) bool { 5500 // match: (MOVWreg (MOVVconst [c])) 5501 // cond: 5502 // result: (MOVVconst [int64(int32(c))]) 5503 for { 5504 v_0 := v.Args[0] 5505 if v_0.Op != OpMIPS64MOVVconst { 5506 break 5507 } 5508 c := v_0.AuxInt 5509 v.reset(OpMIPS64MOVVconst) 5510 v.AuxInt = int64(int32(c)) 5511 return true 5512 } 5513 return false 5514 } 5515 func rewriteValueMIPS64_OpMIPS64MOVWstore_0(v *Value) bool { 5516 // match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 5517 // cond: is32Bit(off1+off2) 5518 // result: (MOVWstore [off1+off2] {sym} ptr val mem) 5519 for { 5520 off1 := v.AuxInt 5521 sym := v.Aux 5522 _ = v.Args[2] 5523 v_0 := v.Args[0] 5524 if v_0.Op != OpMIPS64ADDVconst { 5525 break 5526 } 5527 off2 := v_0.AuxInt 5528 ptr := v_0.Args[0] 5529 val := v.Args[1] 5530 mem := v.Args[2] 5531 if !(is32Bit(off1 + off2)) { 5532 break 5533 } 5534 v.reset(OpMIPS64MOVWstore) 5535 v.AuxInt = off1 + off2 5536 v.Aux = sym 5537 v.AddArg(ptr) 5538 v.AddArg(val) 5539 v.AddArg(mem) 5540 return true 5541 } 5542 // match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 5543 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5544 // result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem) 5545 for { 5546 off1 := v.AuxInt 5547 sym1 := v.Aux 5548 _ = v.Args[2] 5549 v_0 := v.Args[0] 5550 if v_0.Op != OpMIPS64MOVVaddr { 5551 break 5552 } 5553 off2 := v_0.AuxInt 5554 sym2 := v_0.Aux 5555 ptr := v_0.Args[0] 5556 val := v.Args[1] 5557 mem := v.Args[2] 5558 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5559 break 5560 } 5561 v.reset(OpMIPS64MOVWstore) 5562 v.AuxInt = off1 + off2 5563 v.Aux = mergeSym(sym1, sym2) 5564 v.AddArg(ptr) 5565 v.AddArg(val) 5566 v.AddArg(mem) 5567 return true 5568 } 5569 // match: (MOVWstore [off] {sym} ptr (MOVVconst [0]) mem) 5570 // cond: 5571 // result: (MOVWstorezero [off] {sym} ptr mem) 5572 for { 5573 off := v.AuxInt 5574 sym := v.Aux 5575 _ = v.Args[2] 5576 ptr := v.Args[0] 5577 v_1 := v.Args[1] 5578 if v_1.Op != OpMIPS64MOVVconst { 5579 break 5580 } 5581 if v_1.AuxInt != 0 { 5582 break 5583 } 5584 mem := v.Args[2] 5585 v.reset(OpMIPS64MOVWstorezero) 5586 v.AuxInt = off 5587 v.Aux = sym 5588 v.AddArg(ptr) 5589 v.AddArg(mem) 5590 return true 5591 } 5592 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 5593 // cond: 5594 // result: (MOVWstore [off] {sym} ptr x mem) 5595 for { 5596 off := v.AuxInt 5597 sym := v.Aux 5598 _ = v.Args[2] 5599 ptr := v.Args[0] 5600 v_1 := v.Args[1] 5601 if v_1.Op != OpMIPS64MOVWreg { 5602 break 5603 } 5604 x := v_1.Args[0] 5605 mem := v.Args[2] 5606 v.reset(OpMIPS64MOVWstore) 5607 v.AuxInt = off 5608 v.Aux = sym 5609 v.AddArg(ptr) 5610 v.AddArg(x) 5611 v.AddArg(mem) 5612 return true 5613 } 5614 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 5615 // cond: 5616 // result: (MOVWstore [off] {sym} ptr x mem) 5617 for { 5618 off := v.AuxInt 5619 sym := v.Aux 5620 _ = v.Args[2] 5621 ptr := v.Args[0] 5622 v_1 := v.Args[1] 5623 if v_1.Op != OpMIPS64MOVWUreg { 5624 break 5625 } 5626 x := v_1.Args[0] 5627 mem := v.Args[2] 5628 v.reset(OpMIPS64MOVWstore) 5629 v.AuxInt = off 5630 v.Aux = sym 5631 v.AddArg(ptr) 5632 v.AddArg(x) 5633 v.AddArg(mem) 5634 return true 5635 } 5636 return false 5637 } 5638 func rewriteValueMIPS64_OpMIPS64MOVWstorezero_0(v *Value) bool { 5639 // match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 5640 // cond: is32Bit(off1+off2) 5641 // result: (MOVWstorezero [off1+off2] {sym} ptr mem) 5642 for { 5643 off1 := v.AuxInt 5644 sym := v.Aux 5645 _ = v.Args[1] 5646 v_0 := v.Args[0] 5647 if v_0.Op != OpMIPS64ADDVconst { 5648 break 5649 } 5650 off2 := v_0.AuxInt 5651 ptr := v_0.Args[0] 5652 mem := v.Args[1] 5653 if !(is32Bit(off1 + off2)) { 5654 break 5655 } 5656 v.reset(OpMIPS64MOVWstorezero) 5657 v.AuxInt = off1 + off2 5658 v.Aux = sym 5659 v.AddArg(ptr) 5660 v.AddArg(mem) 5661 return true 5662 } 5663 // match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 5664 // cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) 5665 // result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem) 5666 for { 5667 off1 := v.AuxInt 5668 sym1 := v.Aux 5669 _ = v.Args[1] 5670 v_0 := v.Args[0] 5671 if v_0.Op != OpMIPS64MOVVaddr { 5672 break 5673 } 5674 off2 := v_0.AuxInt 5675 sym2 := v_0.Aux 5676 ptr := v_0.Args[0] 5677 mem := v.Args[1] 5678 if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2)) { 5679 break 5680 } 5681 v.reset(OpMIPS64MOVWstorezero) 5682 v.AuxInt = off1 + off2 5683 v.Aux = mergeSym(sym1, sym2) 5684 v.AddArg(ptr) 5685 v.AddArg(mem) 5686 return true 5687 } 5688 return false 5689 } 5690 func rewriteValueMIPS64_OpMIPS64NEGV_0(v *Value) bool { 5691 // match: (NEGV (MOVVconst [c])) 5692 // cond: 5693 // result: (MOVVconst [-c]) 5694 for { 5695 v_0 := v.Args[0] 5696 if v_0.Op != OpMIPS64MOVVconst { 5697 break 5698 } 5699 c := v_0.AuxInt 5700 v.reset(OpMIPS64MOVVconst) 5701 v.AuxInt = -c 5702 return true 5703 } 5704 return false 5705 } 5706 func rewriteValueMIPS64_OpMIPS64NOR_0(v *Value) bool { 5707 // match: (NOR x (MOVVconst [c])) 5708 // cond: is32Bit(c) 5709 // result: (NORconst [c] x) 5710 for { 5711 _ = v.Args[1] 5712 x := v.Args[0] 5713 v_1 := v.Args[1] 5714 if v_1.Op != OpMIPS64MOVVconst { 5715 break 5716 } 5717 c := v_1.AuxInt 5718 if !(is32Bit(c)) { 5719 break 5720 } 5721 v.reset(OpMIPS64NORconst) 5722 v.AuxInt = c 5723 v.AddArg(x) 5724 return true 5725 } 5726 // match: (NOR (MOVVconst [c]) x) 5727 // cond: is32Bit(c) 5728 // result: (NORconst [c] x) 5729 for { 5730 _ = v.Args[1] 5731 v_0 := v.Args[0] 5732 if v_0.Op != OpMIPS64MOVVconst { 5733 break 5734 } 5735 c := v_0.AuxInt 5736 x := v.Args[1] 5737 if !(is32Bit(c)) { 5738 break 5739 } 5740 v.reset(OpMIPS64NORconst) 5741 v.AuxInt = c 5742 v.AddArg(x) 5743 return true 5744 } 5745 return false 5746 } 5747 func rewriteValueMIPS64_OpMIPS64NORconst_0(v *Value) bool { 5748 // match: (NORconst [c] (MOVVconst [d])) 5749 // cond: 5750 // result: (MOVVconst [^(c|d)]) 5751 for { 5752 c := v.AuxInt 5753 v_0 := v.Args[0] 5754 if v_0.Op != OpMIPS64MOVVconst { 5755 break 5756 } 5757 d := v_0.AuxInt 5758 v.reset(OpMIPS64MOVVconst) 5759 v.AuxInt = ^(c | d) 5760 return true 5761 } 5762 return false 5763 } 5764 func rewriteValueMIPS64_OpMIPS64OR_0(v *Value) bool { 5765 // match: (OR x (MOVVconst [c])) 5766 // cond: is32Bit(c) 5767 // result: (ORconst [c] x) 5768 for { 5769 _ = v.Args[1] 5770 x := v.Args[0] 5771 v_1 := v.Args[1] 5772 if v_1.Op != OpMIPS64MOVVconst { 5773 break 5774 } 5775 c := v_1.AuxInt 5776 if !(is32Bit(c)) { 5777 break 5778 } 5779 v.reset(OpMIPS64ORconst) 5780 v.AuxInt = c 5781 v.AddArg(x) 5782 return true 5783 } 5784 // match: (OR (MOVVconst [c]) x) 5785 // cond: is32Bit(c) 5786 // result: (ORconst [c] x) 5787 for { 5788 _ = v.Args[1] 5789 v_0 := v.Args[0] 5790 if v_0.Op != OpMIPS64MOVVconst { 5791 break 5792 } 5793 c := v_0.AuxInt 5794 x := v.Args[1] 5795 if !(is32Bit(c)) { 5796 break 5797 } 5798 v.reset(OpMIPS64ORconst) 5799 v.AuxInt = c 5800 v.AddArg(x) 5801 return true 5802 } 5803 // match: (OR x x) 5804 // cond: 5805 // result: x 5806 for { 5807 _ = v.Args[1] 5808 x := v.Args[0] 5809 if x != v.Args[1] { 5810 break 5811 } 5812 v.reset(OpCopy) 5813 v.Type = x.Type 5814 v.AddArg(x) 5815 return true 5816 } 5817 return false 5818 } 5819 func rewriteValueMIPS64_OpMIPS64ORconst_0(v *Value) bool { 5820 // match: (ORconst [0] x) 5821 // cond: 5822 // result: x 5823 for { 5824 if v.AuxInt != 0 { 5825 break 5826 } 5827 x := v.Args[0] 5828 v.reset(OpCopy) 5829 v.Type = x.Type 5830 v.AddArg(x) 5831 return true 5832 } 5833 // match: (ORconst [-1] _) 5834 // cond: 5835 // result: (MOVVconst [-1]) 5836 for { 5837 if v.AuxInt != -1 { 5838 break 5839 } 5840 v.reset(OpMIPS64MOVVconst) 5841 v.AuxInt = -1 5842 return true 5843 } 5844 // match: (ORconst [c] (MOVVconst [d])) 5845 // cond: 5846 // result: (MOVVconst [c|d]) 5847 for { 5848 c := v.AuxInt 5849 v_0 := v.Args[0] 5850 if v_0.Op != OpMIPS64MOVVconst { 5851 break 5852 } 5853 d := v_0.AuxInt 5854 v.reset(OpMIPS64MOVVconst) 5855 v.AuxInt = c | d 5856 return true 5857 } 5858 // match: (ORconst [c] (ORconst [d] x)) 5859 // cond: is32Bit(c|d) 5860 // result: (ORconst [c|d] x) 5861 for { 5862 c := v.AuxInt 5863 v_0 := v.Args[0] 5864 if v_0.Op != OpMIPS64ORconst { 5865 break 5866 } 5867 d := v_0.AuxInt 5868 x := v_0.Args[0] 5869 if !(is32Bit(c | d)) { 5870 break 5871 } 5872 v.reset(OpMIPS64ORconst) 5873 v.AuxInt = c | d 5874 v.AddArg(x) 5875 return true 5876 } 5877 return false 5878 } 5879 func rewriteValueMIPS64_OpMIPS64SGT_0(v *Value) bool { 5880 // match: (SGT (MOVVconst [c]) x) 5881 // cond: is32Bit(c) 5882 // result: (SGTconst [c] x) 5883 for { 5884 _ = v.Args[1] 5885 v_0 := v.Args[0] 5886 if v_0.Op != OpMIPS64MOVVconst { 5887 break 5888 } 5889 c := v_0.AuxInt 5890 x := v.Args[1] 5891 if !(is32Bit(c)) { 5892 break 5893 } 5894 v.reset(OpMIPS64SGTconst) 5895 v.AuxInt = c 5896 v.AddArg(x) 5897 return true 5898 } 5899 return false 5900 } 5901 func rewriteValueMIPS64_OpMIPS64SGTU_0(v *Value) bool { 5902 // match: (SGTU (MOVVconst [c]) x) 5903 // cond: is32Bit(c) 5904 // result: (SGTUconst [c] x) 5905 for { 5906 _ = v.Args[1] 5907 v_0 := v.Args[0] 5908 if v_0.Op != OpMIPS64MOVVconst { 5909 break 5910 } 5911 c := v_0.AuxInt 5912 x := v.Args[1] 5913 if !(is32Bit(c)) { 5914 break 5915 } 5916 v.reset(OpMIPS64SGTUconst) 5917 v.AuxInt = c 5918 v.AddArg(x) 5919 return true 5920 } 5921 return false 5922 } 5923 func rewriteValueMIPS64_OpMIPS64SGTUconst_0(v *Value) bool { 5924 // match: (SGTUconst [c] (MOVVconst [d])) 5925 // cond: uint64(c)>uint64(d) 5926 // result: (MOVVconst [1]) 5927 for { 5928 c := v.AuxInt 5929 v_0 := v.Args[0] 5930 if v_0.Op != OpMIPS64MOVVconst { 5931 break 5932 } 5933 d := v_0.AuxInt 5934 if !(uint64(c) > uint64(d)) { 5935 break 5936 } 5937 v.reset(OpMIPS64MOVVconst) 5938 v.AuxInt = 1 5939 return true 5940 } 5941 // match: (SGTUconst [c] (MOVVconst [d])) 5942 // cond: uint64(c)<=uint64(d) 5943 // result: (MOVVconst [0]) 5944 for { 5945 c := v.AuxInt 5946 v_0 := v.Args[0] 5947 if v_0.Op != OpMIPS64MOVVconst { 5948 break 5949 } 5950 d := v_0.AuxInt 5951 if !(uint64(c) <= uint64(d)) { 5952 break 5953 } 5954 v.reset(OpMIPS64MOVVconst) 5955 v.AuxInt = 0 5956 return true 5957 } 5958 // match: (SGTUconst [c] (MOVBUreg _)) 5959 // cond: 0xff < uint64(c) 5960 // result: (MOVVconst [1]) 5961 for { 5962 c := v.AuxInt 5963 v_0 := v.Args[0] 5964 if v_0.Op != OpMIPS64MOVBUreg { 5965 break 5966 } 5967 if !(0xff < uint64(c)) { 5968 break 5969 } 5970 v.reset(OpMIPS64MOVVconst) 5971 v.AuxInt = 1 5972 return true 5973 } 5974 // match: (SGTUconst [c] (MOVHUreg _)) 5975 // cond: 0xffff < uint64(c) 5976 // result: (MOVVconst [1]) 5977 for { 5978 c := v.AuxInt 5979 v_0 := v.Args[0] 5980 if v_0.Op != OpMIPS64MOVHUreg { 5981 break 5982 } 5983 if !(0xffff < uint64(c)) { 5984 break 5985 } 5986 v.reset(OpMIPS64MOVVconst) 5987 v.AuxInt = 1 5988 return true 5989 } 5990 // match: (SGTUconst [c] (ANDconst [m] _)) 5991 // cond: uint64(m) < uint64(c) 5992 // result: (MOVVconst [1]) 5993 for { 5994 c := v.AuxInt 5995 v_0 := v.Args[0] 5996 if v_0.Op != OpMIPS64ANDconst { 5997 break 5998 } 5999 m := v_0.AuxInt 6000 if !(uint64(m) < uint64(c)) { 6001 break 6002 } 6003 v.reset(OpMIPS64MOVVconst) 6004 v.AuxInt = 1 6005 return true 6006 } 6007 // match: (SGTUconst [c] (SRLVconst _ [d])) 6008 // cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c) 6009 // result: (MOVVconst [1]) 6010 for { 6011 c := v.AuxInt 6012 v_0 := v.Args[0] 6013 if v_0.Op != OpMIPS64SRLVconst { 6014 break 6015 } 6016 d := v_0.AuxInt 6017 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) { 6018 break 6019 } 6020 v.reset(OpMIPS64MOVVconst) 6021 v.AuxInt = 1 6022 return true 6023 } 6024 return false 6025 } 6026 func rewriteValueMIPS64_OpMIPS64SGTconst_0(v *Value) bool { 6027 // match: (SGTconst [c] (MOVVconst [d])) 6028 // cond: c>d 6029 // result: (MOVVconst [1]) 6030 for { 6031 c := v.AuxInt 6032 v_0 := v.Args[0] 6033 if v_0.Op != OpMIPS64MOVVconst { 6034 break 6035 } 6036 d := v_0.AuxInt 6037 if !(c > d) { 6038 break 6039 } 6040 v.reset(OpMIPS64MOVVconst) 6041 v.AuxInt = 1 6042 return true 6043 } 6044 // match: (SGTconst [c] (MOVVconst [d])) 6045 // cond: c<=d 6046 // result: (MOVVconst [0]) 6047 for { 6048 c := v.AuxInt 6049 v_0 := v.Args[0] 6050 if v_0.Op != OpMIPS64MOVVconst { 6051 break 6052 } 6053 d := v_0.AuxInt 6054 if !(c <= d) { 6055 break 6056 } 6057 v.reset(OpMIPS64MOVVconst) 6058 v.AuxInt = 0 6059 return true 6060 } 6061 // match: (SGTconst [c] (MOVBreg _)) 6062 // cond: 0x7f < c 6063 // result: (MOVVconst [1]) 6064 for { 6065 c := v.AuxInt 6066 v_0 := v.Args[0] 6067 if v_0.Op != OpMIPS64MOVBreg { 6068 break 6069 } 6070 if !(0x7f < c) { 6071 break 6072 } 6073 v.reset(OpMIPS64MOVVconst) 6074 v.AuxInt = 1 6075 return true 6076 } 6077 // match: (SGTconst [c] (MOVBreg _)) 6078 // cond: c <= -0x80 6079 // result: (MOVVconst [0]) 6080 for { 6081 c := v.AuxInt 6082 v_0 := v.Args[0] 6083 if v_0.Op != OpMIPS64MOVBreg { 6084 break 6085 } 6086 if !(c <= -0x80) { 6087 break 6088 } 6089 v.reset(OpMIPS64MOVVconst) 6090 v.AuxInt = 0 6091 return true 6092 } 6093 // match: (SGTconst [c] (MOVBUreg _)) 6094 // cond: 0xff < c 6095 // result: (MOVVconst [1]) 6096 for { 6097 c := v.AuxInt 6098 v_0 := v.Args[0] 6099 if v_0.Op != OpMIPS64MOVBUreg { 6100 break 6101 } 6102 if !(0xff < c) { 6103 break 6104 } 6105 v.reset(OpMIPS64MOVVconst) 6106 v.AuxInt = 1 6107 return true 6108 } 6109 // match: (SGTconst [c] (MOVBUreg _)) 6110 // cond: c < 0 6111 // result: (MOVVconst [0]) 6112 for { 6113 c := v.AuxInt 6114 v_0 := v.Args[0] 6115 if v_0.Op != OpMIPS64MOVBUreg { 6116 break 6117 } 6118 if !(c < 0) { 6119 break 6120 } 6121 v.reset(OpMIPS64MOVVconst) 6122 v.AuxInt = 0 6123 return true 6124 } 6125 // match: (SGTconst [c] (MOVHreg _)) 6126 // cond: 0x7fff < c 6127 // result: (MOVVconst [1]) 6128 for { 6129 c := v.AuxInt 6130 v_0 := v.Args[0] 6131 if v_0.Op != OpMIPS64MOVHreg { 6132 break 6133 } 6134 if !(0x7fff < c) { 6135 break 6136 } 6137 v.reset(OpMIPS64MOVVconst) 6138 v.AuxInt = 1 6139 return true 6140 } 6141 // match: (SGTconst [c] (MOVHreg _)) 6142 // cond: c <= -0x8000 6143 // result: (MOVVconst [0]) 6144 for { 6145 c := v.AuxInt 6146 v_0 := v.Args[0] 6147 if v_0.Op != OpMIPS64MOVHreg { 6148 break 6149 } 6150 if !(c <= -0x8000) { 6151 break 6152 } 6153 v.reset(OpMIPS64MOVVconst) 6154 v.AuxInt = 0 6155 return true 6156 } 6157 // match: (SGTconst [c] (MOVHUreg _)) 6158 // cond: 0xffff < c 6159 // result: (MOVVconst [1]) 6160 for { 6161 c := v.AuxInt 6162 v_0 := v.Args[0] 6163 if v_0.Op != OpMIPS64MOVHUreg { 6164 break 6165 } 6166 if !(0xffff < c) { 6167 break 6168 } 6169 v.reset(OpMIPS64MOVVconst) 6170 v.AuxInt = 1 6171 return true 6172 } 6173 // match: (SGTconst [c] (MOVHUreg _)) 6174 // cond: c < 0 6175 // result: (MOVVconst [0]) 6176 for { 6177 c := v.AuxInt 6178 v_0 := v.Args[0] 6179 if v_0.Op != OpMIPS64MOVHUreg { 6180 break 6181 } 6182 if !(c < 0) { 6183 break 6184 } 6185 v.reset(OpMIPS64MOVVconst) 6186 v.AuxInt = 0 6187 return true 6188 } 6189 return false 6190 } 6191 func rewriteValueMIPS64_OpMIPS64SGTconst_10(v *Value) bool { 6192 // match: (SGTconst [c] (MOVWUreg _)) 6193 // cond: c < 0 6194 // result: (MOVVconst [0]) 6195 for { 6196 c := v.AuxInt 6197 v_0 := v.Args[0] 6198 if v_0.Op != OpMIPS64MOVWUreg { 6199 break 6200 } 6201 if !(c < 0) { 6202 break 6203 } 6204 v.reset(OpMIPS64MOVVconst) 6205 v.AuxInt = 0 6206 return true 6207 } 6208 // match: (SGTconst [c] (ANDconst [m] _)) 6209 // cond: 0 <= m && m < c 6210 // result: (MOVVconst [1]) 6211 for { 6212 c := v.AuxInt 6213 v_0 := v.Args[0] 6214 if v_0.Op != OpMIPS64ANDconst { 6215 break 6216 } 6217 m := v_0.AuxInt 6218 if !(0 <= m && m < c) { 6219 break 6220 } 6221 v.reset(OpMIPS64MOVVconst) 6222 v.AuxInt = 1 6223 return true 6224 } 6225 // match: (SGTconst [c] (SRLVconst _ [d])) 6226 // cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c) 6227 // result: (MOVVconst [1]) 6228 for { 6229 c := v.AuxInt 6230 v_0 := v.Args[0] 6231 if v_0.Op != OpMIPS64SRLVconst { 6232 break 6233 } 6234 d := v_0.AuxInt 6235 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) { 6236 break 6237 } 6238 v.reset(OpMIPS64MOVVconst) 6239 v.AuxInt = 1 6240 return true 6241 } 6242 return false 6243 } 6244 func rewriteValueMIPS64_OpMIPS64SLLV_0(v *Value) bool { 6245 // match: (SLLV _ (MOVVconst [c])) 6246 // cond: uint64(c)>=64 6247 // result: (MOVVconst [0]) 6248 for { 6249 _ = v.Args[1] 6250 v_1 := v.Args[1] 6251 if v_1.Op != OpMIPS64MOVVconst { 6252 break 6253 } 6254 c := v_1.AuxInt 6255 if !(uint64(c) >= 64) { 6256 break 6257 } 6258 v.reset(OpMIPS64MOVVconst) 6259 v.AuxInt = 0 6260 return true 6261 } 6262 // match: (SLLV x (MOVVconst [c])) 6263 // cond: 6264 // result: (SLLVconst x [c]) 6265 for { 6266 _ = v.Args[1] 6267 x := v.Args[0] 6268 v_1 := v.Args[1] 6269 if v_1.Op != OpMIPS64MOVVconst { 6270 break 6271 } 6272 c := v_1.AuxInt 6273 v.reset(OpMIPS64SLLVconst) 6274 v.AuxInt = c 6275 v.AddArg(x) 6276 return true 6277 } 6278 return false 6279 } 6280 func rewriteValueMIPS64_OpMIPS64SLLVconst_0(v *Value) bool { 6281 // match: (SLLVconst [c] (MOVVconst [d])) 6282 // cond: 6283 // result: (MOVVconst [d<<uint64(c)]) 6284 for { 6285 c := v.AuxInt 6286 v_0 := v.Args[0] 6287 if v_0.Op != OpMIPS64MOVVconst { 6288 break 6289 } 6290 d := v_0.AuxInt 6291 v.reset(OpMIPS64MOVVconst) 6292 v.AuxInt = d << uint64(c) 6293 return true 6294 } 6295 return false 6296 } 6297 func rewriteValueMIPS64_OpMIPS64SRAV_0(v *Value) bool { 6298 // match: (SRAV x (MOVVconst [c])) 6299 // cond: uint64(c)>=64 6300 // result: (SRAVconst x [63]) 6301 for { 6302 _ = v.Args[1] 6303 x := v.Args[0] 6304 v_1 := v.Args[1] 6305 if v_1.Op != OpMIPS64MOVVconst { 6306 break 6307 } 6308 c := v_1.AuxInt 6309 if !(uint64(c) >= 64) { 6310 break 6311 } 6312 v.reset(OpMIPS64SRAVconst) 6313 v.AuxInt = 63 6314 v.AddArg(x) 6315 return true 6316 } 6317 // match: (SRAV x (MOVVconst [c])) 6318 // cond: 6319 // result: (SRAVconst x [c]) 6320 for { 6321 _ = v.Args[1] 6322 x := v.Args[0] 6323 v_1 := v.Args[1] 6324 if v_1.Op != OpMIPS64MOVVconst { 6325 break 6326 } 6327 c := v_1.AuxInt 6328 v.reset(OpMIPS64SRAVconst) 6329 v.AuxInt = c 6330 v.AddArg(x) 6331 return true 6332 } 6333 return false 6334 } 6335 func rewriteValueMIPS64_OpMIPS64SRAVconst_0(v *Value) bool { 6336 // match: (SRAVconst [c] (MOVVconst [d])) 6337 // cond: 6338 // result: (MOVVconst [d>>uint64(c)]) 6339 for { 6340 c := v.AuxInt 6341 v_0 := v.Args[0] 6342 if v_0.Op != OpMIPS64MOVVconst { 6343 break 6344 } 6345 d := v_0.AuxInt 6346 v.reset(OpMIPS64MOVVconst) 6347 v.AuxInt = d >> uint64(c) 6348 return true 6349 } 6350 return false 6351 } 6352 func rewriteValueMIPS64_OpMIPS64SRLV_0(v *Value) bool { 6353 // match: (SRLV _ (MOVVconst [c])) 6354 // cond: uint64(c)>=64 6355 // result: (MOVVconst [0]) 6356 for { 6357 _ = v.Args[1] 6358 v_1 := v.Args[1] 6359 if v_1.Op != OpMIPS64MOVVconst { 6360 break 6361 } 6362 c := v_1.AuxInt 6363 if !(uint64(c) >= 64) { 6364 break 6365 } 6366 v.reset(OpMIPS64MOVVconst) 6367 v.AuxInt = 0 6368 return true 6369 } 6370 // match: (SRLV x (MOVVconst [c])) 6371 // cond: 6372 // result: (SRLVconst x [c]) 6373 for { 6374 _ = v.Args[1] 6375 x := v.Args[0] 6376 v_1 := v.Args[1] 6377 if v_1.Op != OpMIPS64MOVVconst { 6378 break 6379 } 6380 c := v_1.AuxInt 6381 v.reset(OpMIPS64SRLVconst) 6382 v.AuxInt = c 6383 v.AddArg(x) 6384 return true 6385 } 6386 return false 6387 } 6388 func rewriteValueMIPS64_OpMIPS64SRLVconst_0(v *Value) bool { 6389 // match: (SRLVconst [c] (MOVVconst [d])) 6390 // cond: 6391 // result: (MOVVconst [int64(uint64(d)>>uint64(c))]) 6392 for { 6393 c := v.AuxInt 6394 v_0 := v.Args[0] 6395 if v_0.Op != OpMIPS64MOVVconst { 6396 break 6397 } 6398 d := v_0.AuxInt 6399 v.reset(OpMIPS64MOVVconst) 6400 v.AuxInt = int64(uint64(d) >> uint64(c)) 6401 return true 6402 } 6403 return false 6404 } 6405 func rewriteValueMIPS64_OpMIPS64SUBV_0(v *Value) bool { 6406 // match: (SUBV x (MOVVconst [c])) 6407 // cond: is32Bit(c) 6408 // result: (SUBVconst [c] x) 6409 for { 6410 _ = v.Args[1] 6411 x := v.Args[0] 6412 v_1 := v.Args[1] 6413 if v_1.Op != OpMIPS64MOVVconst { 6414 break 6415 } 6416 c := v_1.AuxInt 6417 if !(is32Bit(c)) { 6418 break 6419 } 6420 v.reset(OpMIPS64SUBVconst) 6421 v.AuxInt = c 6422 v.AddArg(x) 6423 return true 6424 } 6425 // match: (SUBV x x) 6426 // cond: 6427 // result: (MOVVconst [0]) 6428 for { 6429 _ = v.Args[1] 6430 x := v.Args[0] 6431 if x != v.Args[1] { 6432 break 6433 } 6434 v.reset(OpMIPS64MOVVconst) 6435 v.AuxInt = 0 6436 return true 6437 } 6438 // match: (SUBV (MOVVconst [0]) x) 6439 // cond: 6440 // result: (NEGV x) 6441 for { 6442 _ = v.Args[1] 6443 v_0 := v.Args[0] 6444 if v_0.Op != OpMIPS64MOVVconst { 6445 break 6446 } 6447 if v_0.AuxInt != 0 { 6448 break 6449 } 6450 x := v.Args[1] 6451 v.reset(OpMIPS64NEGV) 6452 v.AddArg(x) 6453 return true 6454 } 6455 return false 6456 } 6457 func rewriteValueMIPS64_OpMIPS64SUBVconst_0(v *Value) bool { 6458 // match: (SUBVconst [0] x) 6459 // cond: 6460 // result: x 6461 for { 6462 if v.AuxInt != 0 { 6463 break 6464 } 6465 x := v.Args[0] 6466 v.reset(OpCopy) 6467 v.Type = x.Type 6468 v.AddArg(x) 6469 return true 6470 } 6471 // match: (SUBVconst [c] (MOVVconst [d])) 6472 // cond: 6473 // result: (MOVVconst [d-c]) 6474 for { 6475 c := v.AuxInt 6476 v_0 := v.Args[0] 6477 if v_0.Op != OpMIPS64MOVVconst { 6478 break 6479 } 6480 d := v_0.AuxInt 6481 v.reset(OpMIPS64MOVVconst) 6482 v.AuxInt = d - c 6483 return true 6484 } 6485 // match: (SUBVconst [c] (SUBVconst [d] x)) 6486 // cond: is32Bit(-c-d) 6487 // result: (ADDVconst [-c-d] x) 6488 for { 6489 c := v.AuxInt 6490 v_0 := v.Args[0] 6491 if v_0.Op != OpMIPS64SUBVconst { 6492 break 6493 } 6494 d := v_0.AuxInt 6495 x := v_0.Args[0] 6496 if !(is32Bit(-c - d)) { 6497 break 6498 } 6499 v.reset(OpMIPS64ADDVconst) 6500 v.AuxInt = -c - d 6501 v.AddArg(x) 6502 return true 6503 } 6504 // match: (SUBVconst [c] (ADDVconst [d] x)) 6505 // cond: is32Bit(-c+d) 6506 // result: (ADDVconst [-c+d] x) 6507 for { 6508 c := v.AuxInt 6509 v_0 := v.Args[0] 6510 if v_0.Op != OpMIPS64ADDVconst { 6511 break 6512 } 6513 d := v_0.AuxInt 6514 x := v_0.Args[0] 6515 if !(is32Bit(-c + d)) { 6516 break 6517 } 6518 v.reset(OpMIPS64ADDVconst) 6519 v.AuxInt = -c + d 6520 v.AddArg(x) 6521 return true 6522 } 6523 return false 6524 } 6525 func rewriteValueMIPS64_OpMIPS64XOR_0(v *Value) bool { 6526 // match: (XOR x (MOVVconst [c])) 6527 // cond: is32Bit(c) 6528 // result: (XORconst [c] x) 6529 for { 6530 _ = v.Args[1] 6531 x := v.Args[0] 6532 v_1 := v.Args[1] 6533 if v_1.Op != OpMIPS64MOVVconst { 6534 break 6535 } 6536 c := v_1.AuxInt 6537 if !(is32Bit(c)) { 6538 break 6539 } 6540 v.reset(OpMIPS64XORconst) 6541 v.AuxInt = c 6542 v.AddArg(x) 6543 return true 6544 } 6545 // match: (XOR (MOVVconst [c]) x) 6546 // cond: is32Bit(c) 6547 // result: (XORconst [c] x) 6548 for { 6549 _ = v.Args[1] 6550 v_0 := v.Args[0] 6551 if v_0.Op != OpMIPS64MOVVconst { 6552 break 6553 } 6554 c := v_0.AuxInt 6555 x := v.Args[1] 6556 if !(is32Bit(c)) { 6557 break 6558 } 6559 v.reset(OpMIPS64XORconst) 6560 v.AuxInt = c 6561 v.AddArg(x) 6562 return true 6563 } 6564 // match: (XOR x x) 6565 // cond: 6566 // result: (MOVVconst [0]) 6567 for { 6568 _ = v.Args[1] 6569 x := v.Args[0] 6570 if x != v.Args[1] { 6571 break 6572 } 6573 v.reset(OpMIPS64MOVVconst) 6574 v.AuxInt = 0 6575 return true 6576 } 6577 return false 6578 } 6579 func rewriteValueMIPS64_OpMIPS64XORconst_0(v *Value) bool { 6580 // match: (XORconst [0] x) 6581 // cond: 6582 // result: x 6583 for { 6584 if v.AuxInt != 0 { 6585 break 6586 } 6587 x := v.Args[0] 6588 v.reset(OpCopy) 6589 v.Type = x.Type 6590 v.AddArg(x) 6591 return true 6592 } 6593 // match: (XORconst [-1] x) 6594 // cond: 6595 // result: (NORconst [0] x) 6596 for { 6597 if v.AuxInt != -1 { 6598 break 6599 } 6600 x := v.Args[0] 6601 v.reset(OpMIPS64NORconst) 6602 v.AuxInt = 0 6603 v.AddArg(x) 6604 return true 6605 } 6606 // match: (XORconst [c] (MOVVconst [d])) 6607 // cond: 6608 // result: (MOVVconst [c^d]) 6609 for { 6610 c := v.AuxInt 6611 v_0 := v.Args[0] 6612 if v_0.Op != OpMIPS64MOVVconst { 6613 break 6614 } 6615 d := v_0.AuxInt 6616 v.reset(OpMIPS64MOVVconst) 6617 v.AuxInt = c ^ d 6618 return true 6619 } 6620 // match: (XORconst [c] (XORconst [d] x)) 6621 // cond: is32Bit(c^d) 6622 // result: (XORconst [c^d] x) 6623 for { 6624 c := v.AuxInt 6625 v_0 := v.Args[0] 6626 if v_0.Op != OpMIPS64XORconst { 6627 break 6628 } 6629 d := v_0.AuxInt 6630 x := v_0.Args[0] 6631 if !(is32Bit(c ^ d)) { 6632 break 6633 } 6634 v.reset(OpMIPS64XORconst) 6635 v.AuxInt = c ^ d 6636 v.AddArg(x) 6637 return true 6638 } 6639 return false 6640 } 6641 func rewriteValueMIPS64_OpMod16_0(v *Value) bool { 6642 b := v.Block 6643 _ = b 6644 typ := &b.Func.Config.Types 6645 _ = typ 6646 // match: (Mod16 x y) 6647 // cond: 6648 // result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 6649 for { 6650 _ = v.Args[1] 6651 x := v.Args[0] 6652 y := v.Args[1] 6653 v.reset(OpSelect0) 6654 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6655 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6656 v1.AddArg(x) 6657 v0.AddArg(v1) 6658 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6659 v2.AddArg(y) 6660 v0.AddArg(v2) 6661 v.AddArg(v0) 6662 return true 6663 } 6664 } 6665 func rewriteValueMIPS64_OpMod16u_0(v *Value) bool { 6666 b := v.Block 6667 _ = b 6668 typ := &b.Func.Config.Types 6669 _ = typ 6670 // match: (Mod16u x y) 6671 // cond: 6672 // result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 6673 for { 6674 _ = v.Args[1] 6675 x := v.Args[0] 6676 y := v.Args[1] 6677 v.reset(OpSelect0) 6678 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6679 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6680 v1.AddArg(x) 6681 v0.AddArg(v1) 6682 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6683 v2.AddArg(y) 6684 v0.AddArg(v2) 6685 v.AddArg(v0) 6686 return true 6687 } 6688 } 6689 func rewriteValueMIPS64_OpMod32_0(v *Value) bool { 6690 b := v.Block 6691 _ = b 6692 typ := &b.Func.Config.Types 6693 _ = typ 6694 // match: (Mod32 x y) 6695 // cond: 6696 // result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 6697 for { 6698 _ = v.Args[1] 6699 x := v.Args[0] 6700 y := v.Args[1] 6701 v.reset(OpSelect0) 6702 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6703 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6704 v1.AddArg(x) 6705 v0.AddArg(v1) 6706 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6707 v2.AddArg(y) 6708 v0.AddArg(v2) 6709 v.AddArg(v0) 6710 return true 6711 } 6712 } 6713 func rewriteValueMIPS64_OpMod32u_0(v *Value) bool { 6714 b := v.Block 6715 _ = b 6716 typ := &b.Func.Config.Types 6717 _ = typ 6718 // match: (Mod32u x y) 6719 // cond: 6720 // result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 6721 for { 6722 _ = v.Args[1] 6723 x := v.Args[0] 6724 y := v.Args[1] 6725 v.reset(OpSelect0) 6726 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6727 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6728 v1.AddArg(x) 6729 v0.AddArg(v1) 6730 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6731 v2.AddArg(y) 6732 v0.AddArg(v2) 6733 v.AddArg(v0) 6734 return true 6735 } 6736 } 6737 func rewriteValueMIPS64_OpMod64_0(v *Value) bool { 6738 b := v.Block 6739 _ = b 6740 typ := &b.Func.Config.Types 6741 _ = typ 6742 // match: (Mod64 x y) 6743 // cond: 6744 // result: (Select0 (DIVV x y)) 6745 for { 6746 _ = v.Args[1] 6747 x := v.Args[0] 6748 y := v.Args[1] 6749 v.reset(OpSelect0) 6750 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6751 v0.AddArg(x) 6752 v0.AddArg(y) 6753 v.AddArg(v0) 6754 return true 6755 } 6756 } 6757 func rewriteValueMIPS64_OpMod64u_0(v *Value) bool { 6758 b := v.Block 6759 _ = b 6760 typ := &b.Func.Config.Types 6761 _ = typ 6762 // match: (Mod64u x y) 6763 // cond: 6764 // result: (Select0 (DIVVU x y)) 6765 for { 6766 _ = v.Args[1] 6767 x := v.Args[0] 6768 y := v.Args[1] 6769 v.reset(OpSelect0) 6770 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6771 v0.AddArg(x) 6772 v0.AddArg(y) 6773 v.AddArg(v0) 6774 return true 6775 } 6776 } 6777 func rewriteValueMIPS64_OpMod8_0(v *Value) bool { 6778 b := v.Block 6779 _ = b 6780 typ := &b.Func.Config.Types 6781 _ = typ 6782 // match: (Mod8 x y) 6783 // cond: 6784 // result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 6785 for { 6786 _ = v.Args[1] 6787 x := v.Args[0] 6788 y := v.Args[1] 6789 v.reset(OpSelect0) 6790 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 6791 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6792 v1.AddArg(x) 6793 v0.AddArg(v1) 6794 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6795 v2.AddArg(y) 6796 v0.AddArg(v2) 6797 v.AddArg(v0) 6798 return true 6799 } 6800 } 6801 func rewriteValueMIPS64_OpMod8u_0(v *Value) bool { 6802 b := v.Block 6803 _ = b 6804 typ := &b.Func.Config.Types 6805 _ = typ 6806 // match: (Mod8u x y) 6807 // cond: 6808 // result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 6809 for { 6810 _ = v.Args[1] 6811 x := v.Args[0] 6812 y := v.Args[1] 6813 v.reset(OpSelect0) 6814 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6815 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6816 v1.AddArg(x) 6817 v0.AddArg(v1) 6818 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6819 v2.AddArg(y) 6820 v0.AddArg(v2) 6821 v.AddArg(v0) 6822 return true 6823 } 6824 } 6825 func rewriteValueMIPS64_OpMove_0(v *Value) bool { 6826 b := v.Block 6827 _ = b 6828 typ := &b.Func.Config.Types 6829 _ = typ 6830 // match: (Move [0] _ _ mem) 6831 // cond: 6832 // result: mem 6833 for { 6834 if v.AuxInt != 0 { 6835 break 6836 } 6837 _ = v.Args[2] 6838 mem := v.Args[2] 6839 v.reset(OpCopy) 6840 v.Type = mem.Type 6841 v.AddArg(mem) 6842 return true 6843 } 6844 // match: (Move [1] dst src mem) 6845 // cond: 6846 // result: (MOVBstore dst (MOVBload src mem) mem) 6847 for { 6848 if v.AuxInt != 1 { 6849 break 6850 } 6851 _ = v.Args[2] 6852 dst := v.Args[0] 6853 src := v.Args[1] 6854 mem := v.Args[2] 6855 v.reset(OpMIPS64MOVBstore) 6856 v.AddArg(dst) 6857 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6858 v0.AddArg(src) 6859 v0.AddArg(mem) 6860 v.AddArg(v0) 6861 v.AddArg(mem) 6862 return true 6863 } 6864 // match: (Move [2] {t} dst src mem) 6865 // cond: t.(*types.Type).Alignment()%2 == 0 6866 // result: (MOVHstore dst (MOVHload src mem) mem) 6867 for { 6868 if v.AuxInt != 2 { 6869 break 6870 } 6871 t := v.Aux 6872 _ = v.Args[2] 6873 dst := v.Args[0] 6874 src := v.Args[1] 6875 mem := v.Args[2] 6876 if !(t.(*types.Type).Alignment()%2 == 0) { 6877 break 6878 } 6879 v.reset(OpMIPS64MOVHstore) 6880 v.AddArg(dst) 6881 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6882 v0.AddArg(src) 6883 v0.AddArg(mem) 6884 v.AddArg(v0) 6885 v.AddArg(mem) 6886 return true 6887 } 6888 // match: (Move [2] dst src mem) 6889 // cond: 6890 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)) 6891 for { 6892 if v.AuxInt != 2 { 6893 break 6894 } 6895 _ = v.Args[2] 6896 dst := v.Args[0] 6897 src := v.Args[1] 6898 mem := v.Args[2] 6899 v.reset(OpMIPS64MOVBstore) 6900 v.AuxInt = 1 6901 v.AddArg(dst) 6902 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6903 v0.AuxInt = 1 6904 v0.AddArg(src) 6905 v0.AddArg(mem) 6906 v.AddArg(v0) 6907 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6908 v1.AddArg(dst) 6909 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6910 v2.AddArg(src) 6911 v2.AddArg(mem) 6912 v1.AddArg(v2) 6913 v1.AddArg(mem) 6914 v.AddArg(v1) 6915 return true 6916 } 6917 // match: (Move [4] {t} dst src mem) 6918 // cond: t.(*types.Type).Alignment()%4 == 0 6919 // result: (MOVWstore dst (MOVWload src mem) mem) 6920 for { 6921 if v.AuxInt != 4 { 6922 break 6923 } 6924 t := v.Aux 6925 _ = v.Args[2] 6926 dst := v.Args[0] 6927 src := v.Args[1] 6928 mem := v.Args[2] 6929 if !(t.(*types.Type).Alignment()%4 == 0) { 6930 break 6931 } 6932 v.reset(OpMIPS64MOVWstore) 6933 v.AddArg(dst) 6934 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 6935 v0.AddArg(src) 6936 v0.AddArg(mem) 6937 v.AddArg(v0) 6938 v.AddArg(mem) 6939 return true 6940 } 6941 // match: (Move [4] {t} dst src mem) 6942 // cond: t.(*types.Type).Alignment()%2 == 0 6943 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)) 6944 for { 6945 if v.AuxInt != 4 { 6946 break 6947 } 6948 t := v.Aux 6949 _ = v.Args[2] 6950 dst := v.Args[0] 6951 src := v.Args[1] 6952 mem := v.Args[2] 6953 if !(t.(*types.Type).Alignment()%2 == 0) { 6954 break 6955 } 6956 v.reset(OpMIPS64MOVHstore) 6957 v.AuxInt = 2 6958 v.AddArg(dst) 6959 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6960 v0.AuxInt = 2 6961 v0.AddArg(src) 6962 v0.AddArg(mem) 6963 v.AddArg(v0) 6964 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 6965 v1.AddArg(dst) 6966 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 6967 v2.AddArg(src) 6968 v2.AddArg(mem) 6969 v1.AddArg(v2) 6970 v1.AddArg(mem) 6971 v.AddArg(v1) 6972 return true 6973 } 6974 // match: (Move [4] dst src mem) 6975 // cond: 6976 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))) 6977 for { 6978 if v.AuxInt != 4 { 6979 break 6980 } 6981 _ = v.Args[2] 6982 dst := v.Args[0] 6983 src := v.Args[1] 6984 mem := v.Args[2] 6985 v.reset(OpMIPS64MOVBstore) 6986 v.AuxInt = 3 6987 v.AddArg(dst) 6988 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6989 v0.AuxInt = 3 6990 v0.AddArg(src) 6991 v0.AddArg(mem) 6992 v.AddArg(v0) 6993 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 6994 v1.AuxInt = 2 6995 v1.AddArg(dst) 6996 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 6997 v2.AuxInt = 2 6998 v2.AddArg(src) 6999 v2.AddArg(mem) 7000 v1.AddArg(v2) 7001 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7002 v3.AuxInt = 1 7003 v3.AddArg(dst) 7004 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7005 v4.AuxInt = 1 7006 v4.AddArg(src) 7007 v4.AddArg(mem) 7008 v3.AddArg(v4) 7009 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7010 v5.AddArg(dst) 7011 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7012 v6.AddArg(src) 7013 v6.AddArg(mem) 7014 v5.AddArg(v6) 7015 v5.AddArg(mem) 7016 v3.AddArg(v5) 7017 v1.AddArg(v3) 7018 v.AddArg(v1) 7019 return true 7020 } 7021 // match: (Move [8] {t} dst src mem) 7022 // cond: t.(*types.Type).Alignment()%8 == 0 7023 // result: (MOVVstore dst (MOVVload src mem) mem) 7024 for { 7025 if v.AuxInt != 8 { 7026 break 7027 } 7028 t := v.Aux 7029 _ = v.Args[2] 7030 dst := v.Args[0] 7031 src := v.Args[1] 7032 mem := v.Args[2] 7033 if !(t.(*types.Type).Alignment()%8 == 0) { 7034 break 7035 } 7036 v.reset(OpMIPS64MOVVstore) 7037 v.AddArg(dst) 7038 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7039 v0.AddArg(src) 7040 v0.AddArg(mem) 7041 v.AddArg(v0) 7042 v.AddArg(mem) 7043 return true 7044 } 7045 // match: (Move [8] {t} dst src mem) 7046 // cond: t.(*types.Type).Alignment()%4 == 0 7047 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 7048 for { 7049 if v.AuxInt != 8 { 7050 break 7051 } 7052 t := v.Aux 7053 _ = v.Args[2] 7054 dst := v.Args[0] 7055 src := v.Args[1] 7056 mem := v.Args[2] 7057 if !(t.(*types.Type).Alignment()%4 == 0) { 7058 break 7059 } 7060 v.reset(OpMIPS64MOVWstore) 7061 v.AuxInt = 4 7062 v.AddArg(dst) 7063 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7064 v0.AuxInt = 4 7065 v0.AddArg(src) 7066 v0.AddArg(mem) 7067 v.AddArg(v0) 7068 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7069 v1.AddArg(dst) 7070 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7071 v2.AddArg(src) 7072 v2.AddArg(mem) 7073 v1.AddArg(v2) 7074 v1.AddArg(mem) 7075 v.AddArg(v1) 7076 return true 7077 } 7078 // match: (Move [8] {t} dst src mem) 7079 // cond: t.(*types.Type).Alignment()%2 == 0 7080 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 7081 for { 7082 if v.AuxInt != 8 { 7083 break 7084 } 7085 t := v.Aux 7086 _ = v.Args[2] 7087 dst := v.Args[0] 7088 src := v.Args[1] 7089 mem := v.Args[2] 7090 if !(t.(*types.Type).Alignment()%2 == 0) { 7091 break 7092 } 7093 v.reset(OpMIPS64MOVHstore) 7094 v.AuxInt = 6 7095 v.AddArg(dst) 7096 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7097 v0.AuxInt = 6 7098 v0.AddArg(src) 7099 v0.AddArg(mem) 7100 v.AddArg(v0) 7101 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7102 v1.AuxInt = 4 7103 v1.AddArg(dst) 7104 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7105 v2.AuxInt = 4 7106 v2.AddArg(src) 7107 v2.AddArg(mem) 7108 v1.AddArg(v2) 7109 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7110 v3.AuxInt = 2 7111 v3.AddArg(dst) 7112 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7113 v4.AuxInt = 2 7114 v4.AddArg(src) 7115 v4.AddArg(mem) 7116 v3.AddArg(v4) 7117 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7118 v5.AddArg(dst) 7119 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7120 v6.AddArg(src) 7121 v6.AddArg(mem) 7122 v5.AddArg(v6) 7123 v5.AddArg(mem) 7124 v3.AddArg(v5) 7125 v1.AddArg(v3) 7126 v.AddArg(v1) 7127 return true 7128 } 7129 return false 7130 } 7131 func rewriteValueMIPS64_OpMove_10(v *Value) bool { 7132 b := v.Block 7133 _ = b 7134 config := b.Func.Config 7135 _ = config 7136 typ := &b.Func.Config.Types 7137 _ = typ 7138 // match: (Move [3] dst src mem) 7139 // cond: 7140 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))) 7141 for { 7142 if v.AuxInt != 3 { 7143 break 7144 } 7145 _ = v.Args[2] 7146 dst := v.Args[0] 7147 src := v.Args[1] 7148 mem := v.Args[2] 7149 v.reset(OpMIPS64MOVBstore) 7150 v.AuxInt = 2 7151 v.AddArg(dst) 7152 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7153 v0.AuxInt = 2 7154 v0.AddArg(src) 7155 v0.AddArg(mem) 7156 v.AddArg(v0) 7157 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7158 v1.AuxInt = 1 7159 v1.AddArg(dst) 7160 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7161 v2.AuxInt = 1 7162 v2.AddArg(src) 7163 v2.AddArg(mem) 7164 v1.AddArg(v2) 7165 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 7166 v3.AddArg(dst) 7167 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8) 7168 v4.AddArg(src) 7169 v4.AddArg(mem) 7170 v3.AddArg(v4) 7171 v3.AddArg(mem) 7172 v1.AddArg(v3) 7173 v.AddArg(v1) 7174 return true 7175 } 7176 // match: (Move [6] {t} dst src mem) 7177 // cond: t.(*types.Type).Alignment()%2 == 0 7178 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 7179 for { 7180 if v.AuxInt != 6 { 7181 break 7182 } 7183 t := v.Aux 7184 _ = v.Args[2] 7185 dst := v.Args[0] 7186 src := v.Args[1] 7187 mem := v.Args[2] 7188 if !(t.(*types.Type).Alignment()%2 == 0) { 7189 break 7190 } 7191 v.reset(OpMIPS64MOVHstore) 7192 v.AuxInt = 4 7193 v.AddArg(dst) 7194 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7195 v0.AuxInt = 4 7196 v0.AddArg(src) 7197 v0.AddArg(mem) 7198 v.AddArg(v0) 7199 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7200 v1.AuxInt = 2 7201 v1.AddArg(dst) 7202 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7203 v2.AuxInt = 2 7204 v2.AddArg(src) 7205 v2.AddArg(mem) 7206 v1.AddArg(v2) 7207 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 7208 v3.AddArg(dst) 7209 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16) 7210 v4.AddArg(src) 7211 v4.AddArg(mem) 7212 v3.AddArg(v4) 7213 v3.AddArg(mem) 7214 v1.AddArg(v3) 7215 v.AddArg(v1) 7216 return true 7217 } 7218 // match: (Move [12] {t} dst src mem) 7219 // cond: t.(*types.Type).Alignment()%4 == 0 7220 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) 7221 for { 7222 if v.AuxInt != 12 { 7223 break 7224 } 7225 t := v.Aux 7226 _ = v.Args[2] 7227 dst := v.Args[0] 7228 src := v.Args[1] 7229 mem := v.Args[2] 7230 if !(t.(*types.Type).Alignment()%4 == 0) { 7231 break 7232 } 7233 v.reset(OpMIPS64MOVWstore) 7234 v.AuxInt = 8 7235 v.AddArg(dst) 7236 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7237 v0.AuxInt = 8 7238 v0.AddArg(src) 7239 v0.AddArg(mem) 7240 v.AddArg(v0) 7241 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7242 v1.AuxInt = 4 7243 v1.AddArg(dst) 7244 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7245 v2.AuxInt = 4 7246 v2.AddArg(src) 7247 v2.AddArg(mem) 7248 v1.AddArg(v2) 7249 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 7250 v3.AddArg(dst) 7251 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32) 7252 v4.AddArg(src) 7253 v4.AddArg(mem) 7254 v3.AddArg(v4) 7255 v3.AddArg(mem) 7256 v1.AddArg(v3) 7257 v.AddArg(v1) 7258 return true 7259 } 7260 // match: (Move [16] {t} dst src mem) 7261 // cond: t.(*types.Type).Alignment()%8 == 0 7262 // result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)) 7263 for { 7264 if v.AuxInt != 16 { 7265 break 7266 } 7267 t := v.Aux 7268 _ = v.Args[2] 7269 dst := v.Args[0] 7270 src := v.Args[1] 7271 mem := v.Args[2] 7272 if !(t.(*types.Type).Alignment()%8 == 0) { 7273 break 7274 } 7275 v.reset(OpMIPS64MOVVstore) 7276 v.AuxInt = 8 7277 v.AddArg(dst) 7278 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7279 v0.AuxInt = 8 7280 v0.AddArg(src) 7281 v0.AddArg(mem) 7282 v.AddArg(v0) 7283 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7284 v1.AddArg(dst) 7285 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7286 v2.AddArg(src) 7287 v2.AddArg(mem) 7288 v1.AddArg(v2) 7289 v1.AddArg(mem) 7290 v.AddArg(v1) 7291 return true 7292 } 7293 // match: (Move [24] {t} dst src mem) 7294 // cond: t.(*types.Type).Alignment()%8 == 0 7295 // result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))) 7296 for { 7297 if v.AuxInt != 24 { 7298 break 7299 } 7300 t := v.Aux 7301 _ = v.Args[2] 7302 dst := v.Args[0] 7303 src := v.Args[1] 7304 mem := v.Args[2] 7305 if !(t.(*types.Type).Alignment()%8 == 0) { 7306 break 7307 } 7308 v.reset(OpMIPS64MOVVstore) 7309 v.AuxInt = 16 7310 v.AddArg(dst) 7311 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7312 v0.AuxInt = 16 7313 v0.AddArg(src) 7314 v0.AddArg(mem) 7315 v.AddArg(v0) 7316 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7317 v1.AuxInt = 8 7318 v1.AddArg(dst) 7319 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7320 v2.AuxInt = 8 7321 v2.AddArg(src) 7322 v2.AddArg(mem) 7323 v1.AddArg(v2) 7324 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 7325 v3.AddArg(dst) 7326 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64) 7327 v4.AddArg(src) 7328 v4.AddArg(mem) 7329 v3.AddArg(v4) 7330 v3.AddArg(mem) 7331 v1.AddArg(v3) 7332 v.AddArg(v1) 7333 return true 7334 } 7335 // match: (Move [s] {t} dst src mem) 7336 // cond: s > 24 || t.(*types.Type).Alignment()%8 != 0 7337 // result: (LoweredMove [t.(*types.Type).Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 7338 for { 7339 s := v.AuxInt 7340 t := v.Aux 7341 _ = v.Args[2] 7342 dst := v.Args[0] 7343 src := v.Args[1] 7344 mem := v.Args[2] 7345 if !(s > 24 || t.(*types.Type).Alignment()%8 != 0) { 7346 break 7347 } 7348 v.reset(OpMIPS64LoweredMove) 7349 v.AuxInt = t.(*types.Type).Alignment() 7350 v.AddArg(dst) 7351 v.AddArg(src) 7352 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type) 7353 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 7354 v0.AddArg(src) 7355 v.AddArg(v0) 7356 v.AddArg(mem) 7357 return true 7358 } 7359 return false 7360 } 7361 func rewriteValueMIPS64_OpMul16_0(v *Value) bool { 7362 b := v.Block 7363 _ = b 7364 typ := &b.Func.Config.Types 7365 _ = typ 7366 // match: (Mul16 x y) 7367 // cond: 7368 // result: (Select1 (MULVU x y)) 7369 for { 7370 _ = v.Args[1] 7371 x := v.Args[0] 7372 y := v.Args[1] 7373 v.reset(OpSelect1) 7374 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7375 v0.AddArg(x) 7376 v0.AddArg(y) 7377 v.AddArg(v0) 7378 return true 7379 } 7380 } 7381 func rewriteValueMIPS64_OpMul32_0(v *Value) bool { 7382 b := v.Block 7383 _ = b 7384 typ := &b.Func.Config.Types 7385 _ = typ 7386 // match: (Mul32 x y) 7387 // cond: 7388 // result: (Select1 (MULVU x y)) 7389 for { 7390 _ = v.Args[1] 7391 x := v.Args[0] 7392 y := v.Args[1] 7393 v.reset(OpSelect1) 7394 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7395 v0.AddArg(x) 7396 v0.AddArg(y) 7397 v.AddArg(v0) 7398 return true 7399 } 7400 } 7401 func rewriteValueMIPS64_OpMul32F_0(v *Value) bool { 7402 // match: (Mul32F x y) 7403 // cond: 7404 // result: (MULF x y) 7405 for { 7406 _ = v.Args[1] 7407 x := v.Args[0] 7408 y := v.Args[1] 7409 v.reset(OpMIPS64MULF) 7410 v.AddArg(x) 7411 v.AddArg(y) 7412 return true 7413 } 7414 } 7415 func rewriteValueMIPS64_OpMul64_0(v *Value) bool { 7416 b := v.Block 7417 _ = b 7418 typ := &b.Func.Config.Types 7419 _ = typ 7420 // match: (Mul64 x y) 7421 // cond: 7422 // result: (Select1 (MULVU x y)) 7423 for { 7424 _ = v.Args[1] 7425 x := v.Args[0] 7426 y := v.Args[1] 7427 v.reset(OpSelect1) 7428 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7429 v0.AddArg(x) 7430 v0.AddArg(y) 7431 v.AddArg(v0) 7432 return true 7433 } 7434 } 7435 func rewriteValueMIPS64_OpMul64F_0(v *Value) bool { 7436 // match: (Mul64F x y) 7437 // cond: 7438 // result: (MULD x y) 7439 for { 7440 _ = v.Args[1] 7441 x := v.Args[0] 7442 y := v.Args[1] 7443 v.reset(OpMIPS64MULD) 7444 v.AddArg(x) 7445 v.AddArg(y) 7446 return true 7447 } 7448 } 7449 func rewriteValueMIPS64_OpMul8_0(v *Value) bool { 7450 b := v.Block 7451 _ = b 7452 typ := &b.Func.Config.Types 7453 _ = typ 7454 // match: (Mul8 x y) 7455 // cond: 7456 // result: (Select1 (MULVU x y)) 7457 for { 7458 _ = v.Args[1] 7459 x := v.Args[0] 7460 y := v.Args[1] 7461 v.reset(OpSelect1) 7462 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7463 v0.AddArg(x) 7464 v0.AddArg(y) 7465 v.AddArg(v0) 7466 return true 7467 } 7468 } 7469 func rewriteValueMIPS64_OpNeg16_0(v *Value) bool { 7470 // match: (Neg16 x) 7471 // cond: 7472 // result: (NEGV x) 7473 for { 7474 x := v.Args[0] 7475 v.reset(OpMIPS64NEGV) 7476 v.AddArg(x) 7477 return true 7478 } 7479 } 7480 func rewriteValueMIPS64_OpNeg32_0(v *Value) bool { 7481 // match: (Neg32 x) 7482 // cond: 7483 // result: (NEGV x) 7484 for { 7485 x := v.Args[0] 7486 v.reset(OpMIPS64NEGV) 7487 v.AddArg(x) 7488 return true 7489 } 7490 } 7491 func rewriteValueMIPS64_OpNeg32F_0(v *Value) bool { 7492 // match: (Neg32F x) 7493 // cond: 7494 // result: (NEGF x) 7495 for { 7496 x := v.Args[0] 7497 v.reset(OpMIPS64NEGF) 7498 v.AddArg(x) 7499 return true 7500 } 7501 } 7502 func rewriteValueMIPS64_OpNeg64_0(v *Value) bool { 7503 // match: (Neg64 x) 7504 // cond: 7505 // result: (NEGV x) 7506 for { 7507 x := v.Args[0] 7508 v.reset(OpMIPS64NEGV) 7509 v.AddArg(x) 7510 return true 7511 } 7512 } 7513 func rewriteValueMIPS64_OpNeg64F_0(v *Value) bool { 7514 // match: (Neg64F x) 7515 // cond: 7516 // result: (NEGD x) 7517 for { 7518 x := v.Args[0] 7519 v.reset(OpMIPS64NEGD) 7520 v.AddArg(x) 7521 return true 7522 } 7523 } 7524 func rewriteValueMIPS64_OpNeg8_0(v *Value) bool { 7525 // match: (Neg8 x) 7526 // cond: 7527 // result: (NEGV x) 7528 for { 7529 x := v.Args[0] 7530 v.reset(OpMIPS64NEGV) 7531 v.AddArg(x) 7532 return true 7533 } 7534 } 7535 func rewriteValueMIPS64_OpNeq16_0(v *Value) bool { 7536 b := v.Block 7537 _ = b 7538 typ := &b.Func.Config.Types 7539 _ = typ 7540 // match: (Neq16 x y) 7541 // cond: 7542 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0])) 7543 for { 7544 _ = v.Args[1] 7545 x := v.Args[0] 7546 y := v.Args[1] 7547 v.reset(OpMIPS64SGTU) 7548 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7549 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 7550 v1.AddArg(x) 7551 v0.AddArg(v1) 7552 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7553 v2.AddArg(y) 7554 v0.AddArg(v2) 7555 v.AddArg(v0) 7556 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7557 v3.AuxInt = 0 7558 v.AddArg(v3) 7559 return true 7560 } 7561 } 7562 func rewriteValueMIPS64_OpNeq32_0(v *Value) bool { 7563 b := v.Block 7564 _ = b 7565 typ := &b.Func.Config.Types 7566 _ = typ 7567 // match: (Neq32 x y) 7568 // cond: 7569 // result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0])) 7570 for { 7571 _ = v.Args[1] 7572 x := v.Args[0] 7573 y := v.Args[1] 7574 v.reset(OpMIPS64SGTU) 7575 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7576 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7577 v1.AddArg(x) 7578 v0.AddArg(v1) 7579 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7580 v2.AddArg(y) 7581 v0.AddArg(v2) 7582 v.AddArg(v0) 7583 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7584 v3.AuxInt = 0 7585 v.AddArg(v3) 7586 return true 7587 } 7588 } 7589 func rewriteValueMIPS64_OpNeq32F_0(v *Value) bool { 7590 b := v.Block 7591 _ = b 7592 // match: (Neq32F x y) 7593 // cond: 7594 // result: (FPFlagFalse (CMPEQF x y)) 7595 for { 7596 _ = v.Args[1] 7597 x := v.Args[0] 7598 y := v.Args[1] 7599 v.reset(OpMIPS64FPFlagFalse) 7600 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags) 7601 v0.AddArg(x) 7602 v0.AddArg(y) 7603 v.AddArg(v0) 7604 return true 7605 } 7606 } 7607 func rewriteValueMIPS64_OpNeq64_0(v *Value) bool { 7608 b := v.Block 7609 _ = b 7610 typ := &b.Func.Config.Types 7611 _ = typ 7612 // match: (Neq64 x y) 7613 // cond: 7614 // result: (SGTU (XOR x y) (MOVVconst [0])) 7615 for { 7616 _ = v.Args[1] 7617 x := v.Args[0] 7618 y := v.Args[1] 7619 v.reset(OpMIPS64SGTU) 7620 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7621 v0.AddArg(x) 7622 v0.AddArg(y) 7623 v.AddArg(v0) 7624 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7625 v1.AuxInt = 0 7626 v.AddArg(v1) 7627 return true 7628 } 7629 } 7630 func rewriteValueMIPS64_OpNeq64F_0(v *Value) bool { 7631 b := v.Block 7632 _ = b 7633 // match: (Neq64F x y) 7634 // cond: 7635 // result: (FPFlagFalse (CMPEQD x y)) 7636 for { 7637 _ = v.Args[1] 7638 x := v.Args[0] 7639 y := v.Args[1] 7640 v.reset(OpMIPS64FPFlagFalse) 7641 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags) 7642 v0.AddArg(x) 7643 v0.AddArg(y) 7644 v.AddArg(v0) 7645 return true 7646 } 7647 } 7648 func rewriteValueMIPS64_OpNeq8_0(v *Value) bool { 7649 b := v.Block 7650 _ = b 7651 typ := &b.Func.Config.Types 7652 _ = typ 7653 // match: (Neq8 x y) 7654 // cond: 7655 // result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0])) 7656 for { 7657 _ = v.Args[1] 7658 x := v.Args[0] 7659 y := v.Args[1] 7660 v.reset(OpMIPS64SGTU) 7661 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7662 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7663 v1.AddArg(x) 7664 v0.AddArg(v1) 7665 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7666 v2.AddArg(y) 7667 v0.AddArg(v2) 7668 v.AddArg(v0) 7669 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7670 v3.AuxInt = 0 7671 v.AddArg(v3) 7672 return true 7673 } 7674 } 7675 func rewriteValueMIPS64_OpNeqB_0(v *Value) bool { 7676 // match: (NeqB x y) 7677 // cond: 7678 // result: (XOR x y) 7679 for { 7680 _ = v.Args[1] 7681 x := v.Args[0] 7682 y := v.Args[1] 7683 v.reset(OpMIPS64XOR) 7684 v.AddArg(x) 7685 v.AddArg(y) 7686 return true 7687 } 7688 } 7689 func rewriteValueMIPS64_OpNeqPtr_0(v *Value) bool { 7690 b := v.Block 7691 _ = b 7692 typ := &b.Func.Config.Types 7693 _ = typ 7694 // match: (NeqPtr x y) 7695 // cond: 7696 // result: (SGTU (XOR x y) (MOVVconst [0])) 7697 for { 7698 _ = v.Args[1] 7699 x := v.Args[0] 7700 y := v.Args[1] 7701 v.reset(OpMIPS64SGTU) 7702 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64) 7703 v0.AddArg(x) 7704 v0.AddArg(y) 7705 v.AddArg(v0) 7706 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7707 v1.AuxInt = 0 7708 v.AddArg(v1) 7709 return true 7710 } 7711 } 7712 func rewriteValueMIPS64_OpNilCheck_0(v *Value) bool { 7713 // match: (NilCheck ptr mem) 7714 // cond: 7715 // result: (LoweredNilCheck ptr mem) 7716 for { 7717 _ = v.Args[1] 7718 ptr := v.Args[0] 7719 mem := v.Args[1] 7720 v.reset(OpMIPS64LoweredNilCheck) 7721 v.AddArg(ptr) 7722 v.AddArg(mem) 7723 return true 7724 } 7725 } 7726 func rewriteValueMIPS64_OpNot_0(v *Value) bool { 7727 // match: (Not x) 7728 // cond: 7729 // result: (XORconst [1] x) 7730 for { 7731 x := v.Args[0] 7732 v.reset(OpMIPS64XORconst) 7733 v.AuxInt = 1 7734 v.AddArg(x) 7735 return true 7736 } 7737 } 7738 func rewriteValueMIPS64_OpOffPtr_0(v *Value) bool { 7739 // match: (OffPtr [off] ptr:(SP)) 7740 // cond: 7741 // result: (MOVVaddr [off] ptr) 7742 for { 7743 off := v.AuxInt 7744 ptr := v.Args[0] 7745 if ptr.Op != OpSP { 7746 break 7747 } 7748 v.reset(OpMIPS64MOVVaddr) 7749 v.AuxInt = off 7750 v.AddArg(ptr) 7751 return true 7752 } 7753 // match: (OffPtr [off] ptr) 7754 // cond: 7755 // result: (ADDVconst [off] ptr) 7756 for { 7757 off := v.AuxInt 7758 ptr := v.Args[0] 7759 v.reset(OpMIPS64ADDVconst) 7760 v.AuxInt = off 7761 v.AddArg(ptr) 7762 return true 7763 } 7764 } 7765 func rewriteValueMIPS64_OpOr16_0(v *Value) bool { 7766 // match: (Or16 x y) 7767 // cond: 7768 // result: (OR x y) 7769 for { 7770 _ = v.Args[1] 7771 x := v.Args[0] 7772 y := v.Args[1] 7773 v.reset(OpMIPS64OR) 7774 v.AddArg(x) 7775 v.AddArg(y) 7776 return true 7777 } 7778 } 7779 func rewriteValueMIPS64_OpOr32_0(v *Value) bool { 7780 // match: (Or32 x y) 7781 // cond: 7782 // result: (OR x y) 7783 for { 7784 _ = v.Args[1] 7785 x := v.Args[0] 7786 y := v.Args[1] 7787 v.reset(OpMIPS64OR) 7788 v.AddArg(x) 7789 v.AddArg(y) 7790 return true 7791 } 7792 } 7793 func rewriteValueMIPS64_OpOr64_0(v *Value) bool { 7794 // match: (Or64 x y) 7795 // cond: 7796 // result: (OR x y) 7797 for { 7798 _ = v.Args[1] 7799 x := v.Args[0] 7800 y := v.Args[1] 7801 v.reset(OpMIPS64OR) 7802 v.AddArg(x) 7803 v.AddArg(y) 7804 return true 7805 } 7806 } 7807 func rewriteValueMIPS64_OpOr8_0(v *Value) bool { 7808 // match: (Or8 x y) 7809 // cond: 7810 // result: (OR x y) 7811 for { 7812 _ = v.Args[1] 7813 x := v.Args[0] 7814 y := v.Args[1] 7815 v.reset(OpMIPS64OR) 7816 v.AddArg(x) 7817 v.AddArg(y) 7818 return true 7819 } 7820 } 7821 func rewriteValueMIPS64_OpOrB_0(v *Value) bool { 7822 // match: (OrB x y) 7823 // cond: 7824 // result: (OR x y) 7825 for { 7826 _ = v.Args[1] 7827 x := v.Args[0] 7828 y := v.Args[1] 7829 v.reset(OpMIPS64OR) 7830 v.AddArg(x) 7831 v.AddArg(y) 7832 return true 7833 } 7834 } 7835 func rewriteValueMIPS64_OpRound32F_0(v *Value) bool { 7836 // match: (Round32F x) 7837 // cond: 7838 // result: x 7839 for { 7840 x := v.Args[0] 7841 v.reset(OpCopy) 7842 v.Type = x.Type 7843 v.AddArg(x) 7844 return true 7845 } 7846 } 7847 func rewriteValueMIPS64_OpRound64F_0(v *Value) bool { 7848 // match: (Round64F x) 7849 // cond: 7850 // result: x 7851 for { 7852 x := v.Args[0] 7853 v.reset(OpCopy) 7854 v.Type = x.Type 7855 v.AddArg(x) 7856 return true 7857 } 7858 } 7859 func rewriteValueMIPS64_OpRsh16Ux16_0(v *Value) bool { 7860 b := v.Block 7861 _ = b 7862 typ := &b.Func.Config.Types 7863 _ = typ 7864 // match: (Rsh16Ux16 <t> x y) 7865 // cond: 7866 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y))) 7867 for { 7868 t := v.Type 7869 _ = v.Args[1] 7870 x := v.Args[0] 7871 y := v.Args[1] 7872 v.reset(OpMIPS64AND) 7873 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7874 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7875 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7876 v2.AuxInt = 64 7877 v1.AddArg(v2) 7878 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7879 v3.AddArg(y) 7880 v1.AddArg(v3) 7881 v0.AddArg(v1) 7882 v.AddArg(v0) 7883 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7884 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7885 v5.AddArg(x) 7886 v4.AddArg(v5) 7887 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7888 v6.AddArg(y) 7889 v4.AddArg(v6) 7890 v.AddArg(v4) 7891 return true 7892 } 7893 } 7894 func rewriteValueMIPS64_OpRsh16Ux32_0(v *Value) bool { 7895 b := v.Block 7896 _ = b 7897 typ := &b.Func.Config.Types 7898 _ = typ 7899 // match: (Rsh16Ux32 <t> x y) 7900 // cond: 7901 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y))) 7902 for { 7903 t := v.Type 7904 _ = v.Args[1] 7905 x := v.Args[0] 7906 y := v.Args[1] 7907 v.reset(OpMIPS64AND) 7908 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7909 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7910 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7911 v2.AuxInt = 64 7912 v1.AddArg(v2) 7913 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7914 v3.AddArg(y) 7915 v1.AddArg(v3) 7916 v0.AddArg(v1) 7917 v.AddArg(v0) 7918 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7919 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7920 v5.AddArg(x) 7921 v4.AddArg(v5) 7922 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 7923 v6.AddArg(y) 7924 v4.AddArg(v6) 7925 v.AddArg(v4) 7926 return true 7927 } 7928 } 7929 func rewriteValueMIPS64_OpRsh16Ux64_0(v *Value) bool { 7930 b := v.Block 7931 _ = b 7932 typ := &b.Func.Config.Types 7933 _ = typ 7934 // match: (Rsh16Ux64 <t> x y) 7935 // cond: 7936 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y)) 7937 for { 7938 t := v.Type 7939 _ = v.Args[1] 7940 x := v.Args[0] 7941 y := v.Args[1] 7942 v.reset(OpMIPS64AND) 7943 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7944 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7945 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7946 v2.AuxInt = 64 7947 v1.AddArg(v2) 7948 v1.AddArg(y) 7949 v0.AddArg(v1) 7950 v.AddArg(v0) 7951 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7952 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7953 v4.AddArg(x) 7954 v3.AddArg(v4) 7955 v3.AddArg(y) 7956 v.AddArg(v3) 7957 return true 7958 } 7959 } 7960 func rewriteValueMIPS64_OpRsh16Ux8_0(v *Value) bool { 7961 b := v.Block 7962 _ = b 7963 typ := &b.Func.Config.Types 7964 _ = typ 7965 // match: (Rsh16Ux8 <t> x y) 7966 // cond: 7967 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y))) 7968 for { 7969 t := v.Type 7970 _ = v.Args[1] 7971 x := v.Args[0] 7972 y := v.Args[1] 7973 v.reset(OpMIPS64AND) 7974 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 7975 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 7976 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 7977 v2.AuxInt = 64 7978 v1.AddArg(v2) 7979 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7980 v3.AddArg(y) 7981 v1.AddArg(v3) 7982 v0.AddArg(v1) 7983 v.AddArg(v0) 7984 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 7985 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 7986 v5.AddArg(x) 7987 v4.AddArg(v5) 7988 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 7989 v6.AddArg(y) 7990 v4.AddArg(v6) 7991 v.AddArg(v4) 7992 return true 7993 } 7994 } 7995 func rewriteValueMIPS64_OpRsh16x16_0(v *Value) bool { 7996 b := v.Block 7997 _ = b 7998 typ := &b.Func.Config.Types 7999 _ = typ 8000 // match: (Rsh16x16 <t> x y) 8001 // cond: 8002 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8003 for { 8004 t := v.Type 8005 _ = v.Args[1] 8006 x := v.Args[0] 8007 y := v.Args[1] 8008 v.reset(OpMIPS64SRAV) 8009 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8010 v0.AddArg(x) 8011 v.AddArg(v0) 8012 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8013 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8014 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8015 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8016 v4.AddArg(y) 8017 v3.AddArg(v4) 8018 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8019 v5.AuxInt = 63 8020 v3.AddArg(v5) 8021 v2.AddArg(v3) 8022 v1.AddArg(v2) 8023 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8024 v6.AddArg(y) 8025 v1.AddArg(v6) 8026 v.AddArg(v1) 8027 return true 8028 } 8029 } 8030 func rewriteValueMIPS64_OpRsh16x32_0(v *Value) bool { 8031 b := v.Block 8032 _ = b 8033 typ := &b.Func.Config.Types 8034 _ = typ 8035 // match: (Rsh16x32 <t> x y) 8036 // cond: 8037 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8038 for { 8039 t := v.Type 8040 _ = v.Args[1] 8041 x := v.Args[0] 8042 y := v.Args[1] 8043 v.reset(OpMIPS64SRAV) 8044 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8045 v0.AddArg(x) 8046 v.AddArg(v0) 8047 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8048 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8049 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8050 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8051 v4.AddArg(y) 8052 v3.AddArg(v4) 8053 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8054 v5.AuxInt = 63 8055 v3.AddArg(v5) 8056 v2.AddArg(v3) 8057 v1.AddArg(v2) 8058 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8059 v6.AddArg(y) 8060 v1.AddArg(v6) 8061 v.AddArg(v1) 8062 return true 8063 } 8064 } 8065 func rewriteValueMIPS64_OpRsh16x64_0(v *Value) bool { 8066 b := v.Block 8067 _ = b 8068 typ := &b.Func.Config.Types 8069 _ = typ 8070 // match: (Rsh16x64 <t> x y) 8071 // cond: 8072 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8073 for { 8074 t := v.Type 8075 _ = v.Args[1] 8076 x := v.Args[0] 8077 y := v.Args[1] 8078 v.reset(OpMIPS64SRAV) 8079 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8080 v0.AddArg(x) 8081 v.AddArg(v0) 8082 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8083 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8084 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8085 v3.AddArg(y) 8086 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8087 v4.AuxInt = 63 8088 v3.AddArg(v4) 8089 v2.AddArg(v3) 8090 v1.AddArg(v2) 8091 v1.AddArg(y) 8092 v.AddArg(v1) 8093 return true 8094 } 8095 } 8096 func rewriteValueMIPS64_OpRsh16x8_0(v *Value) bool { 8097 b := v.Block 8098 _ = b 8099 typ := &b.Func.Config.Types 8100 _ = typ 8101 // match: (Rsh16x8 <t> x y) 8102 // cond: 8103 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8104 for { 8105 t := v.Type 8106 _ = v.Args[1] 8107 x := v.Args[0] 8108 y := v.Args[1] 8109 v.reset(OpMIPS64SRAV) 8110 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 8111 v0.AddArg(x) 8112 v.AddArg(v0) 8113 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8114 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8115 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8116 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8117 v4.AddArg(y) 8118 v3.AddArg(v4) 8119 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8120 v5.AuxInt = 63 8121 v3.AddArg(v5) 8122 v2.AddArg(v3) 8123 v1.AddArg(v2) 8124 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8125 v6.AddArg(y) 8126 v1.AddArg(v6) 8127 v.AddArg(v1) 8128 return true 8129 } 8130 } 8131 func rewriteValueMIPS64_OpRsh32Ux16_0(v *Value) bool { 8132 b := v.Block 8133 _ = b 8134 typ := &b.Func.Config.Types 8135 _ = typ 8136 // match: (Rsh32Ux16 <t> x y) 8137 // cond: 8138 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y))) 8139 for { 8140 t := v.Type 8141 _ = v.Args[1] 8142 x := v.Args[0] 8143 y := v.Args[1] 8144 v.reset(OpMIPS64AND) 8145 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8146 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8147 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8148 v2.AuxInt = 64 8149 v1.AddArg(v2) 8150 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8151 v3.AddArg(y) 8152 v1.AddArg(v3) 8153 v0.AddArg(v1) 8154 v.AddArg(v0) 8155 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8156 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8157 v5.AddArg(x) 8158 v4.AddArg(v5) 8159 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8160 v6.AddArg(y) 8161 v4.AddArg(v6) 8162 v.AddArg(v4) 8163 return true 8164 } 8165 } 8166 func rewriteValueMIPS64_OpRsh32Ux32_0(v *Value) bool { 8167 b := v.Block 8168 _ = b 8169 typ := &b.Func.Config.Types 8170 _ = typ 8171 // match: (Rsh32Ux32 <t> x y) 8172 // cond: 8173 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y))) 8174 for { 8175 t := v.Type 8176 _ = v.Args[1] 8177 x := v.Args[0] 8178 y := v.Args[1] 8179 v.reset(OpMIPS64AND) 8180 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8181 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8182 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8183 v2.AuxInt = 64 8184 v1.AddArg(v2) 8185 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8186 v3.AddArg(y) 8187 v1.AddArg(v3) 8188 v0.AddArg(v1) 8189 v.AddArg(v0) 8190 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8191 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8192 v5.AddArg(x) 8193 v4.AddArg(v5) 8194 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8195 v6.AddArg(y) 8196 v4.AddArg(v6) 8197 v.AddArg(v4) 8198 return true 8199 } 8200 } 8201 func rewriteValueMIPS64_OpRsh32Ux64_0(v *Value) bool { 8202 b := v.Block 8203 _ = b 8204 typ := &b.Func.Config.Types 8205 _ = typ 8206 // match: (Rsh32Ux64 <t> x y) 8207 // cond: 8208 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y)) 8209 for { 8210 t := v.Type 8211 _ = v.Args[1] 8212 x := v.Args[0] 8213 y := v.Args[1] 8214 v.reset(OpMIPS64AND) 8215 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8216 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8217 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8218 v2.AuxInt = 64 8219 v1.AddArg(v2) 8220 v1.AddArg(y) 8221 v0.AddArg(v1) 8222 v.AddArg(v0) 8223 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8224 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8225 v4.AddArg(x) 8226 v3.AddArg(v4) 8227 v3.AddArg(y) 8228 v.AddArg(v3) 8229 return true 8230 } 8231 } 8232 func rewriteValueMIPS64_OpRsh32Ux8_0(v *Value) bool { 8233 b := v.Block 8234 _ = b 8235 typ := &b.Func.Config.Types 8236 _ = typ 8237 // match: (Rsh32Ux8 <t> x y) 8238 // cond: 8239 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y))) 8240 for { 8241 t := v.Type 8242 _ = v.Args[1] 8243 x := v.Args[0] 8244 y := v.Args[1] 8245 v.reset(OpMIPS64AND) 8246 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8247 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8248 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8249 v2.AuxInt = 64 8250 v1.AddArg(v2) 8251 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8252 v3.AddArg(y) 8253 v1.AddArg(v3) 8254 v0.AddArg(v1) 8255 v.AddArg(v0) 8256 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8257 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8258 v5.AddArg(x) 8259 v4.AddArg(v5) 8260 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8261 v6.AddArg(y) 8262 v4.AddArg(v6) 8263 v.AddArg(v4) 8264 return true 8265 } 8266 } 8267 func rewriteValueMIPS64_OpRsh32x16_0(v *Value) bool { 8268 b := v.Block 8269 _ = b 8270 typ := &b.Func.Config.Types 8271 _ = typ 8272 // match: (Rsh32x16 <t> x y) 8273 // cond: 8274 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8275 for { 8276 t := v.Type 8277 _ = v.Args[1] 8278 x := v.Args[0] 8279 y := v.Args[1] 8280 v.reset(OpMIPS64SRAV) 8281 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8282 v0.AddArg(x) 8283 v.AddArg(v0) 8284 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8285 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8286 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8287 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8288 v4.AddArg(y) 8289 v3.AddArg(v4) 8290 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8291 v5.AuxInt = 63 8292 v3.AddArg(v5) 8293 v2.AddArg(v3) 8294 v1.AddArg(v2) 8295 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8296 v6.AddArg(y) 8297 v1.AddArg(v6) 8298 v.AddArg(v1) 8299 return true 8300 } 8301 } 8302 func rewriteValueMIPS64_OpRsh32x32_0(v *Value) bool { 8303 b := v.Block 8304 _ = b 8305 typ := &b.Func.Config.Types 8306 _ = typ 8307 // match: (Rsh32x32 <t> x y) 8308 // cond: 8309 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8310 for { 8311 t := v.Type 8312 _ = v.Args[1] 8313 x := v.Args[0] 8314 y := v.Args[1] 8315 v.reset(OpMIPS64SRAV) 8316 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8317 v0.AddArg(x) 8318 v.AddArg(v0) 8319 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8320 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8321 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8322 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8323 v4.AddArg(y) 8324 v3.AddArg(v4) 8325 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8326 v5.AuxInt = 63 8327 v3.AddArg(v5) 8328 v2.AddArg(v3) 8329 v1.AddArg(v2) 8330 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8331 v6.AddArg(y) 8332 v1.AddArg(v6) 8333 v.AddArg(v1) 8334 return true 8335 } 8336 } 8337 func rewriteValueMIPS64_OpRsh32x64_0(v *Value) bool { 8338 b := v.Block 8339 _ = b 8340 typ := &b.Func.Config.Types 8341 _ = typ 8342 // match: (Rsh32x64 <t> x y) 8343 // cond: 8344 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8345 for { 8346 t := v.Type 8347 _ = v.Args[1] 8348 x := v.Args[0] 8349 y := v.Args[1] 8350 v.reset(OpMIPS64SRAV) 8351 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8352 v0.AddArg(x) 8353 v.AddArg(v0) 8354 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8355 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8356 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8357 v3.AddArg(y) 8358 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8359 v4.AuxInt = 63 8360 v3.AddArg(v4) 8361 v2.AddArg(v3) 8362 v1.AddArg(v2) 8363 v1.AddArg(y) 8364 v.AddArg(v1) 8365 return true 8366 } 8367 } 8368 func rewriteValueMIPS64_OpRsh32x8_0(v *Value) bool { 8369 b := v.Block 8370 _ = b 8371 typ := &b.Func.Config.Types 8372 _ = typ 8373 // match: (Rsh32x8 <t> x y) 8374 // cond: 8375 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8376 for { 8377 t := v.Type 8378 _ = v.Args[1] 8379 x := v.Args[0] 8380 y := v.Args[1] 8381 v.reset(OpMIPS64SRAV) 8382 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 8383 v0.AddArg(x) 8384 v.AddArg(v0) 8385 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8386 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8387 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8388 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8389 v4.AddArg(y) 8390 v3.AddArg(v4) 8391 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8392 v5.AuxInt = 63 8393 v3.AddArg(v5) 8394 v2.AddArg(v3) 8395 v1.AddArg(v2) 8396 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8397 v6.AddArg(y) 8398 v1.AddArg(v6) 8399 v.AddArg(v1) 8400 return true 8401 } 8402 } 8403 func rewriteValueMIPS64_OpRsh64Ux16_0(v *Value) bool { 8404 b := v.Block 8405 _ = b 8406 typ := &b.Func.Config.Types 8407 _ = typ 8408 // match: (Rsh64Ux16 <t> x y) 8409 // cond: 8410 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y))) 8411 for { 8412 t := v.Type 8413 _ = v.Args[1] 8414 x := v.Args[0] 8415 y := v.Args[1] 8416 v.reset(OpMIPS64AND) 8417 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8418 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8419 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8420 v2.AuxInt = 64 8421 v1.AddArg(v2) 8422 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8423 v3.AddArg(y) 8424 v1.AddArg(v3) 8425 v0.AddArg(v1) 8426 v.AddArg(v0) 8427 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8428 v4.AddArg(x) 8429 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8430 v5.AddArg(y) 8431 v4.AddArg(v5) 8432 v.AddArg(v4) 8433 return true 8434 } 8435 } 8436 func rewriteValueMIPS64_OpRsh64Ux32_0(v *Value) bool { 8437 b := v.Block 8438 _ = b 8439 typ := &b.Func.Config.Types 8440 _ = typ 8441 // match: (Rsh64Ux32 <t> x y) 8442 // cond: 8443 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y))) 8444 for { 8445 t := v.Type 8446 _ = v.Args[1] 8447 x := v.Args[0] 8448 y := v.Args[1] 8449 v.reset(OpMIPS64AND) 8450 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8451 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8452 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8453 v2.AuxInt = 64 8454 v1.AddArg(v2) 8455 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8456 v3.AddArg(y) 8457 v1.AddArg(v3) 8458 v0.AddArg(v1) 8459 v.AddArg(v0) 8460 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8461 v4.AddArg(x) 8462 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8463 v5.AddArg(y) 8464 v4.AddArg(v5) 8465 v.AddArg(v4) 8466 return true 8467 } 8468 } 8469 func rewriteValueMIPS64_OpRsh64Ux64_0(v *Value) bool { 8470 b := v.Block 8471 _ = b 8472 typ := &b.Func.Config.Types 8473 _ = typ 8474 // match: (Rsh64Ux64 <t> x y) 8475 // cond: 8476 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y)) 8477 for { 8478 t := v.Type 8479 _ = v.Args[1] 8480 x := v.Args[0] 8481 y := v.Args[1] 8482 v.reset(OpMIPS64AND) 8483 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8484 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8485 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8486 v2.AuxInt = 64 8487 v1.AddArg(v2) 8488 v1.AddArg(y) 8489 v0.AddArg(v1) 8490 v.AddArg(v0) 8491 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8492 v3.AddArg(x) 8493 v3.AddArg(y) 8494 v.AddArg(v3) 8495 return true 8496 } 8497 } 8498 func rewriteValueMIPS64_OpRsh64Ux8_0(v *Value) bool { 8499 b := v.Block 8500 _ = b 8501 typ := &b.Func.Config.Types 8502 _ = typ 8503 // match: (Rsh64Ux8 <t> x y) 8504 // cond: 8505 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y))) 8506 for { 8507 t := v.Type 8508 _ = v.Args[1] 8509 x := v.Args[0] 8510 y := v.Args[1] 8511 v.reset(OpMIPS64AND) 8512 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8513 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8514 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8515 v2.AuxInt = 64 8516 v1.AddArg(v2) 8517 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8518 v3.AddArg(y) 8519 v1.AddArg(v3) 8520 v0.AddArg(v1) 8521 v.AddArg(v0) 8522 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8523 v4.AddArg(x) 8524 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8525 v5.AddArg(y) 8526 v4.AddArg(v5) 8527 v.AddArg(v4) 8528 return true 8529 } 8530 } 8531 func rewriteValueMIPS64_OpRsh64x16_0(v *Value) bool { 8532 b := v.Block 8533 _ = b 8534 typ := &b.Func.Config.Types 8535 _ = typ 8536 // match: (Rsh64x16 <t> x y) 8537 // cond: 8538 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8539 for { 8540 t := v.Type 8541 _ = v.Args[1] 8542 x := v.Args[0] 8543 y := v.Args[1] 8544 v.reset(OpMIPS64SRAV) 8545 v.AddArg(x) 8546 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8547 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8548 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8549 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8550 v3.AddArg(y) 8551 v2.AddArg(v3) 8552 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8553 v4.AuxInt = 63 8554 v2.AddArg(v4) 8555 v1.AddArg(v2) 8556 v0.AddArg(v1) 8557 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8558 v5.AddArg(y) 8559 v0.AddArg(v5) 8560 v.AddArg(v0) 8561 return true 8562 } 8563 } 8564 func rewriteValueMIPS64_OpRsh64x32_0(v *Value) bool { 8565 b := v.Block 8566 _ = b 8567 typ := &b.Func.Config.Types 8568 _ = typ 8569 // match: (Rsh64x32 <t> x y) 8570 // cond: 8571 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8572 for { 8573 t := v.Type 8574 _ = v.Args[1] 8575 x := v.Args[0] 8576 y := v.Args[1] 8577 v.reset(OpMIPS64SRAV) 8578 v.AddArg(x) 8579 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8580 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8581 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8582 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8583 v3.AddArg(y) 8584 v2.AddArg(v3) 8585 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8586 v4.AuxInt = 63 8587 v2.AddArg(v4) 8588 v1.AddArg(v2) 8589 v0.AddArg(v1) 8590 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8591 v5.AddArg(y) 8592 v0.AddArg(v5) 8593 v.AddArg(v0) 8594 return true 8595 } 8596 } 8597 func rewriteValueMIPS64_OpRsh64x64_0(v *Value) bool { 8598 b := v.Block 8599 _ = b 8600 typ := &b.Func.Config.Types 8601 _ = typ 8602 // match: (Rsh64x64 <t> x y) 8603 // cond: 8604 // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8605 for { 8606 t := v.Type 8607 _ = v.Args[1] 8608 x := v.Args[0] 8609 y := v.Args[1] 8610 v.reset(OpMIPS64SRAV) 8611 v.AddArg(x) 8612 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8613 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8614 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8615 v2.AddArg(y) 8616 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8617 v3.AuxInt = 63 8618 v2.AddArg(v3) 8619 v1.AddArg(v2) 8620 v0.AddArg(v1) 8621 v0.AddArg(y) 8622 v.AddArg(v0) 8623 return true 8624 } 8625 } 8626 func rewriteValueMIPS64_OpRsh64x8_0(v *Value) bool { 8627 b := v.Block 8628 _ = b 8629 typ := &b.Func.Config.Types 8630 _ = typ 8631 // match: (Rsh64x8 <t> x y) 8632 // cond: 8633 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8634 for { 8635 t := v.Type 8636 _ = v.Args[1] 8637 x := v.Args[0] 8638 y := v.Args[1] 8639 v.reset(OpMIPS64SRAV) 8640 v.AddArg(x) 8641 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8642 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8643 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8644 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8645 v3.AddArg(y) 8646 v2.AddArg(v3) 8647 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8648 v4.AuxInt = 63 8649 v2.AddArg(v4) 8650 v1.AddArg(v2) 8651 v0.AddArg(v1) 8652 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8653 v5.AddArg(y) 8654 v0.AddArg(v5) 8655 v.AddArg(v0) 8656 return true 8657 } 8658 } 8659 func rewriteValueMIPS64_OpRsh8Ux16_0(v *Value) bool { 8660 b := v.Block 8661 _ = b 8662 typ := &b.Func.Config.Types 8663 _ = typ 8664 // match: (Rsh8Ux16 <t> x y) 8665 // cond: 8666 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y))) 8667 for { 8668 t := v.Type 8669 _ = v.Args[1] 8670 x := v.Args[0] 8671 y := v.Args[1] 8672 v.reset(OpMIPS64AND) 8673 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8674 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8675 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8676 v2.AuxInt = 64 8677 v1.AddArg(v2) 8678 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8679 v3.AddArg(y) 8680 v1.AddArg(v3) 8681 v0.AddArg(v1) 8682 v.AddArg(v0) 8683 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8684 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8685 v5.AddArg(x) 8686 v4.AddArg(v5) 8687 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8688 v6.AddArg(y) 8689 v4.AddArg(v6) 8690 v.AddArg(v4) 8691 return true 8692 } 8693 } 8694 func rewriteValueMIPS64_OpRsh8Ux32_0(v *Value) bool { 8695 b := v.Block 8696 _ = b 8697 typ := &b.Func.Config.Types 8698 _ = typ 8699 // match: (Rsh8Ux32 <t> x y) 8700 // cond: 8701 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y))) 8702 for { 8703 t := v.Type 8704 _ = v.Args[1] 8705 x := v.Args[0] 8706 y := v.Args[1] 8707 v.reset(OpMIPS64AND) 8708 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8709 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8710 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8711 v2.AuxInt = 64 8712 v1.AddArg(v2) 8713 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8714 v3.AddArg(y) 8715 v1.AddArg(v3) 8716 v0.AddArg(v1) 8717 v.AddArg(v0) 8718 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8719 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8720 v5.AddArg(x) 8721 v4.AddArg(v5) 8722 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8723 v6.AddArg(y) 8724 v4.AddArg(v6) 8725 v.AddArg(v4) 8726 return true 8727 } 8728 } 8729 func rewriteValueMIPS64_OpRsh8Ux64_0(v *Value) bool { 8730 b := v.Block 8731 _ = b 8732 typ := &b.Func.Config.Types 8733 _ = typ 8734 // match: (Rsh8Ux64 <t> x y) 8735 // cond: 8736 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y)) 8737 for { 8738 t := v.Type 8739 _ = v.Args[1] 8740 x := v.Args[0] 8741 y := v.Args[1] 8742 v.reset(OpMIPS64AND) 8743 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8744 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8745 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8746 v2.AuxInt = 64 8747 v1.AddArg(v2) 8748 v1.AddArg(y) 8749 v0.AddArg(v1) 8750 v.AddArg(v0) 8751 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8752 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8753 v4.AddArg(x) 8754 v3.AddArg(v4) 8755 v3.AddArg(y) 8756 v.AddArg(v3) 8757 return true 8758 } 8759 } 8760 func rewriteValueMIPS64_OpRsh8Ux8_0(v *Value) bool { 8761 b := v.Block 8762 _ = b 8763 typ := &b.Func.Config.Types 8764 _ = typ 8765 // match: (Rsh8Ux8 <t> x y) 8766 // cond: 8767 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y))) 8768 for { 8769 t := v.Type 8770 _ = v.Args[1] 8771 x := v.Args[0] 8772 y := v.Args[1] 8773 v.reset(OpMIPS64AND) 8774 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8775 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8776 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8777 v2.AuxInt = 64 8778 v1.AddArg(v2) 8779 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8780 v3.AddArg(y) 8781 v1.AddArg(v3) 8782 v0.AddArg(v1) 8783 v.AddArg(v0) 8784 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t) 8785 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8786 v5.AddArg(x) 8787 v4.AddArg(v5) 8788 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8789 v6.AddArg(y) 8790 v4.AddArg(v6) 8791 v.AddArg(v4) 8792 return true 8793 } 8794 } 8795 func rewriteValueMIPS64_OpRsh8x16_0(v *Value) bool { 8796 b := v.Block 8797 _ = b 8798 typ := &b.Func.Config.Types 8799 _ = typ 8800 // match: (Rsh8x16 <t> x y) 8801 // cond: 8802 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 8803 for { 8804 t := v.Type 8805 _ = v.Args[1] 8806 x := v.Args[0] 8807 y := v.Args[1] 8808 v.reset(OpMIPS64SRAV) 8809 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8810 v0.AddArg(x) 8811 v.AddArg(v0) 8812 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8813 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8814 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8815 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8816 v4.AddArg(y) 8817 v3.AddArg(v4) 8818 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8819 v5.AuxInt = 63 8820 v3.AddArg(v5) 8821 v2.AddArg(v3) 8822 v1.AddArg(v2) 8823 v6 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 8824 v6.AddArg(y) 8825 v1.AddArg(v6) 8826 v.AddArg(v1) 8827 return true 8828 } 8829 } 8830 func rewriteValueMIPS64_OpRsh8x32_0(v *Value) bool { 8831 b := v.Block 8832 _ = b 8833 typ := &b.Func.Config.Types 8834 _ = typ 8835 // match: (Rsh8x32 <t> x y) 8836 // cond: 8837 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 8838 for { 8839 t := v.Type 8840 _ = v.Args[1] 8841 x := v.Args[0] 8842 y := v.Args[1] 8843 v.reset(OpMIPS64SRAV) 8844 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8845 v0.AddArg(x) 8846 v.AddArg(v0) 8847 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8848 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8849 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8850 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8851 v4.AddArg(y) 8852 v3.AddArg(v4) 8853 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8854 v5.AuxInt = 63 8855 v3.AddArg(v5) 8856 v2.AddArg(v3) 8857 v1.AddArg(v2) 8858 v6 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 8859 v6.AddArg(y) 8860 v1.AddArg(v6) 8861 v.AddArg(v1) 8862 return true 8863 } 8864 } 8865 func rewriteValueMIPS64_OpRsh8x64_0(v *Value) bool { 8866 b := v.Block 8867 _ = b 8868 typ := &b.Func.Config.Types 8869 _ = typ 8870 // match: (Rsh8x64 <t> x y) 8871 // cond: 8872 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 8873 for { 8874 t := v.Type 8875 _ = v.Args[1] 8876 x := v.Args[0] 8877 y := v.Args[1] 8878 v.reset(OpMIPS64SRAV) 8879 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8880 v0.AddArg(x) 8881 v.AddArg(v0) 8882 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8883 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8884 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8885 v3.AddArg(y) 8886 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8887 v4.AuxInt = 63 8888 v3.AddArg(v4) 8889 v2.AddArg(v3) 8890 v1.AddArg(v2) 8891 v1.AddArg(y) 8892 v.AddArg(v1) 8893 return true 8894 } 8895 } 8896 func rewriteValueMIPS64_OpRsh8x8_0(v *Value) bool { 8897 b := v.Block 8898 _ = b 8899 typ := &b.Func.Config.Types 8900 _ = typ 8901 // match: (Rsh8x8 <t> x y) 8902 // cond: 8903 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 8904 for { 8905 t := v.Type 8906 _ = v.Args[1] 8907 x := v.Args[0] 8908 y := v.Args[1] 8909 v.reset(OpMIPS64SRAV) 8910 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 8911 v0.AddArg(x) 8912 v.AddArg(v0) 8913 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t) 8914 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 8915 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool) 8916 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8917 v4.AddArg(y) 8918 v3.AddArg(v4) 8919 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 8920 v5.AuxInt = 63 8921 v3.AddArg(v5) 8922 v2.AddArg(v3) 8923 v1.AddArg(v2) 8924 v6 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 8925 v6.AddArg(y) 8926 v1.AddArg(v6) 8927 v.AddArg(v1) 8928 return true 8929 } 8930 } 8931 func rewriteValueMIPS64_OpSelect0_0(v *Value) bool { 8932 // match: (Select0 (DIVVU _ (MOVVconst [1]))) 8933 // cond: 8934 // result: (MOVVconst [0]) 8935 for { 8936 v_0 := v.Args[0] 8937 if v_0.Op != OpMIPS64DIVVU { 8938 break 8939 } 8940 _ = v_0.Args[1] 8941 v_0_1 := v_0.Args[1] 8942 if v_0_1.Op != OpMIPS64MOVVconst { 8943 break 8944 } 8945 if v_0_1.AuxInt != 1 { 8946 break 8947 } 8948 v.reset(OpMIPS64MOVVconst) 8949 v.AuxInt = 0 8950 return true 8951 } 8952 // match: (Select0 (DIVVU x (MOVVconst [c]))) 8953 // cond: isPowerOfTwo(c) 8954 // result: (ANDconst [c-1] x) 8955 for { 8956 v_0 := v.Args[0] 8957 if v_0.Op != OpMIPS64DIVVU { 8958 break 8959 } 8960 _ = v_0.Args[1] 8961 x := v_0.Args[0] 8962 v_0_1 := v_0.Args[1] 8963 if v_0_1.Op != OpMIPS64MOVVconst { 8964 break 8965 } 8966 c := v_0_1.AuxInt 8967 if !(isPowerOfTwo(c)) { 8968 break 8969 } 8970 v.reset(OpMIPS64ANDconst) 8971 v.AuxInt = c - 1 8972 v.AddArg(x) 8973 return true 8974 } 8975 // match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 8976 // cond: 8977 // result: (MOVVconst [c%d]) 8978 for { 8979 v_0 := v.Args[0] 8980 if v_0.Op != OpMIPS64DIVV { 8981 break 8982 } 8983 _ = v_0.Args[1] 8984 v_0_0 := v_0.Args[0] 8985 if v_0_0.Op != OpMIPS64MOVVconst { 8986 break 8987 } 8988 c := v_0_0.AuxInt 8989 v_0_1 := v_0.Args[1] 8990 if v_0_1.Op != OpMIPS64MOVVconst { 8991 break 8992 } 8993 d := v_0_1.AuxInt 8994 v.reset(OpMIPS64MOVVconst) 8995 v.AuxInt = c % d 8996 return true 8997 } 8998 // match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 8999 // cond: 9000 // result: (MOVVconst [int64(uint64(c)%uint64(d))]) 9001 for { 9002 v_0 := v.Args[0] 9003 if v_0.Op != OpMIPS64DIVVU { 9004 break 9005 } 9006 _ = v_0.Args[1] 9007 v_0_0 := v_0.Args[0] 9008 if v_0_0.Op != OpMIPS64MOVVconst { 9009 break 9010 } 9011 c := v_0_0.AuxInt 9012 v_0_1 := v_0.Args[1] 9013 if v_0_1.Op != OpMIPS64MOVVconst { 9014 break 9015 } 9016 d := v_0_1.AuxInt 9017 v.reset(OpMIPS64MOVVconst) 9018 v.AuxInt = int64(uint64(c) % uint64(d)) 9019 return true 9020 } 9021 return false 9022 } 9023 func rewriteValueMIPS64_OpSelect1_0(v *Value) bool { 9024 // match: (Select1 (MULVU x (MOVVconst [-1]))) 9025 // cond: 9026 // result: (NEGV x) 9027 for { 9028 v_0 := v.Args[0] 9029 if v_0.Op != OpMIPS64MULVU { 9030 break 9031 } 9032 _ = v_0.Args[1] 9033 x := v_0.Args[0] 9034 v_0_1 := v_0.Args[1] 9035 if v_0_1.Op != OpMIPS64MOVVconst { 9036 break 9037 } 9038 if v_0_1.AuxInt != -1 { 9039 break 9040 } 9041 v.reset(OpMIPS64NEGV) 9042 v.AddArg(x) 9043 return true 9044 } 9045 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 9046 // cond: 9047 // result: (NEGV x) 9048 for { 9049 v_0 := v.Args[0] 9050 if v_0.Op != OpMIPS64MULVU { 9051 break 9052 } 9053 _ = v_0.Args[1] 9054 v_0_0 := v_0.Args[0] 9055 if v_0_0.Op != OpMIPS64MOVVconst { 9056 break 9057 } 9058 if v_0_0.AuxInt != -1 { 9059 break 9060 } 9061 x := v_0.Args[1] 9062 v.reset(OpMIPS64NEGV) 9063 v.AddArg(x) 9064 return true 9065 } 9066 // match: (Select1 (MULVU _ (MOVVconst [0]))) 9067 // cond: 9068 // result: (MOVVconst [0]) 9069 for { 9070 v_0 := v.Args[0] 9071 if v_0.Op != OpMIPS64MULVU { 9072 break 9073 } 9074 _ = v_0.Args[1] 9075 v_0_1 := v_0.Args[1] 9076 if v_0_1.Op != OpMIPS64MOVVconst { 9077 break 9078 } 9079 if v_0_1.AuxInt != 0 { 9080 break 9081 } 9082 v.reset(OpMIPS64MOVVconst) 9083 v.AuxInt = 0 9084 return true 9085 } 9086 // match: (Select1 (MULVU (MOVVconst [0]) _)) 9087 // cond: 9088 // result: (MOVVconst [0]) 9089 for { 9090 v_0 := v.Args[0] 9091 if v_0.Op != OpMIPS64MULVU { 9092 break 9093 } 9094 _ = v_0.Args[1] 9095 v_0_0 := v_0.Args[0] 9096 if v_0_0.Op != OpMIPS64MOVVconst { 9097 break 9098 } 9099 if v_0_0.AuxInt != 0 { 9100 break 9101 } 9102 v.reset(OpMIPS64MOVVconst) 9103 v.AuxInt = 0 9104 return true 9105 } 9106 // match: (Select1 (MULVU x (MOVVconst [1]))) 9107 // cond: 9108 // result: x 9109 for { 9110 v_0 := v.Args[0] 9111 if v_0.Op != OpMIPS64MULVU { 9112 break 9113 } 9114 _ = v_0.Args[1] 9115 x := v_0.Args[0] 9116 v_0_1 := v_0.Args[1] 9117 if v_0_1.Op != OpMIPS64MOVVconst { 9118 break 9119 } 9120 if v_0_1.AuxInt != 1 { 9121 break 9122 } 9123 v.reset(OpCopy) 9124 v.Type = x.Type 9125 v.AddArg(x) 9126 return true 9127 } 9128 // match: (Select1 (MULVU (MOVVconst [1]) x)) 9129 // cond: 9130 // result: x 9131 for { 9132 v_0 := v.Args[0] 9133 if v_0.Op != OpMIPS64MULVU { 9134 break 9135 } 9136 _ = v_0.Args[1] 9137 v_0_0 := v_0.Args[0] 9138 if v_0_0.Op != OpMIPS64MOVVconst { 9139 break 9140 } 9141 if v_0_0.AuxInt != 1 { 9142 break 9143 } 9144 x := v_0.Args[1] 9145 v.reset(OpCopy) 9146 v.Type = x.Type 9147 v.AddArg(x) 9148 return true 9149 } 9150 // match: (Select1 (MULVU x (MOVVconst [c]))) 9151 // cond: isPowerOfTwo(c) 9152 // result: (SLLVconst [log2(c)] x) 9153 for { 9154 v_0 := v.Args[0] 9155 if v_0.Op != OpMIPS64MULVU { 9156 break 9157 } 9158 _ = v_0.Args[1] 9159 x := v_0.Args[0] 9160 v_0_1 := v_0.Args[1] 9161 if v_0_1.Op != OpMIPS64MOVVconst { 9162 break 9163 } 9164 c := v_0_1.AuxInt 9165 if !(isPowerOfTwo(c)) { 9166 break 9167 } 9168 v.reset(OpMIPS64SLLVconst) 9169 v.AuxInt = log2(c) 9170 v.AddArg(x) 9171 return true 9172 } 9173 // match: (Select1 (MULVU (MOVVconst [c]) x)) 9174 // cond: isPowerOfTwo(c) 9175 // result: (SLLVconst [log2(c)] x) 9176 for { 9177 v_0 := v.Args[0] 9178 if v_0.Op != OpMIPS64MULVU { 9179 break 9180 } 9181 _ = v_0.Args[1] 9182 v_0_0 := v_0.Args[0] 9183 if v_0_0.Op != OpMIPS64MOVVconst { 9184 break 9185 } 9186 c := v_0_0.AuxInt 9187 x := v_0.Args[1] 9188 if !(isPowerOfTwo(c)) { 9189 break 9190 } 9191 v.reset(OpMIPS64SLLVconst) 9192 v.AuxInt = log2(c) 9193 v.AddArg(x) 9194 return true 9195 } 9196 // match: (Select1 (MULVU (MOVVconst [-1]) x)) 9197 // cond: 9198 // result: (NEGV x) 9199 for { 9200 v_0 := v.Args[0] 9201 if v_0.Op != OpMIPS64MULVU { 9202 break 9203 } 9204 _ = v_0.Args[1] 9205 v_0_0 := v_0.Args[0] 9206 if v_0_0.Op != OpMIPS64MOVVconst { 9207 break 9208 } 9209 if v_0_0.AuxInt != -1 { 9210 break 9211 } 9212 x := v_0.Args[1] 9213 v.reset(OpMIPS64NEGV) 9214 v.AddArg(x) 9215 return true 9216 } 9217 // match: (Select1 (MULVU x (MOVVconst [-1]))) 9218 // cond: 9219 // result: (NEGV x) 9220 for { 9221 v_0 := v.Args[0] 9222 if v_0.Op != OpMIPS64MULVU { 9223 break 9224 } 9225 _ = v_0.Args[1] 9226 x := v_0.Args[0] 9227 v_0_1 := v_0.Args[1] 9228 if v_0_1.Op != OpMIPS64MOVVconst { 9229 break 9230 } 9231 if v_0_1.AuxInt != -1 { 9232 break 9233 } 9234 v.reset(OpMIPS64NEGV) 9235 v.AddArg(x) 9236 return true 9237 } 9238 return false 9239 } 9240 func rewriteValueMIPS64_OpSelect1_10(v *Value) bool { 9241 // match: (Select1 (MULVU (MOVVconst [0]) _)) 9242 // cond: 9243 // result: (MOVVconst [0]) 9244 for { 9245 v_0 := v.Args[0] 9246 if v_0.Op != OpMIPS64MULVU { 9247 break 9248 } 9249 _ = v_0.Args[1] 9250 v_0_0 := v_0.Args[0] 9251 if v_0_0.Op != OpMIPS64MOVVconst { 9252 break 9253 } 9254 if v_0_0.AuxInt != 0 { 9255 break 9256 } 9257 v.reset(OpMIPS64MOVVconst) 9258 v.AuxInt = 0 9259 return true 9260 } 9261 // match: (Select1 (MULVU _ (MOVVconst [0]))) 9262 // cond: 9263 // result: (MOVVconst [0]) 9264 for { 9265 v_0 := v.Args[0] 9266 if v_0.Op != OpMIPS64MULVU { 9267 break 9268 } 9269 _ = v_0.Args[1] 9270 v_0_1 := v_0.Args[1] 9271 if v_0_1.Op != OpMIPS64MOVVconst { 9272 break 9273 } 9274 if v_0_1.AuxInt != 0 { 9275 break 9276 } 9277 v.reset(OpMIPS64MOVVconst) 9278 v.AuxInt = 0 9279 return true 9280 } 9281 // match: (Select1 (MULVU (MOVVconst [1]) x)) 9282 // cond: 9283 // result: x 9284 for { 9285 v_0 := v.Args[0] 9286 if v_0.Op != OpMIPS64MULVU { 9287 break 9288 } 9289 _ = v_0.Args[1] 9290 v_0_0 := v_0.Args[0] 9291 if v_0_0.Op != OpMIPS64MOVVconst { 9292 break 9293 } 9294 if v_0_0.AuxInt != 1 { 9295 break 9296 } 9297 x := v_0.Args[1] 9298 v.reset(OpCopy) 9299 v.Type = x.Type 9300 v.AddArg(x) 9301 return true 9302 } 9303 // match: (Select1 (MULVU x (MOVVconst [1]))) 9304 // cond: 9305 // result: x 9306 for { 9307 v_0 := v.Args[0] 9308 if v_0.Op != OpMIPS64MULVU { 9309 break 9310 } 9311 _ = v_0.Args[1] 9312 x := v_0.Args[0] 9313 v_0_1 := v_0.Args[1] 9314 if v_0_1.Op != OpMIPS64MOVVconst { 9315 break 9316 } 9317 if v_0_1.AuxInt != 1 { 9318 break 9319 } 9320 v.reset(OpCopy) 9321 v.Type = x.Type 9322 v.AddArg(x) 9323 return true 9324 } 9325 // match: (Select1 (MULVU (MOVVconst [c]) x)) 9326 // cond: isPowerOfTwo(c) 9327 // result: (SLLVconst [log2(c)] x) 9328 for { 9329 v_0 := v.Args[0] 9330 if v_0.Op != OpMIPS64MULVU { 9331 break 9332 } 9333 _ = v_0.Args[1] 9334 v_0_0 := v_0.Args[0] 9335 if v_0_0.Op != OpMIPS64MOVVconst { 9336 break 9337 } 9338 c := v_0_0.AuxInt 9339 x := v_0.Args[1] 9340 if !(isPowerOfTwo(c)) { 9341 break 9342 } 9343 v.reset(OpMIPS64SLLVconst) 9344 v.AuxInt = log2(c) 9345 v.AddArg(x) 9346 return true 9347 } 9348 // match: (Select1 (MULVU x (MOVVconst [c]))) 9349 // cond: isPowerOfTwo(c) 9350 // result: (SLLVconst [log2(c)] x) 9351 for { 9352 v_0 := v.Args[0] 9353 if v_0.Op != OpMIPS64MULVU { 9354 break 9355 } 9356 _ = v_0.Args[1] 9357 x := v_0.Args[0] 9358 v_0_1 := v_0.Args[1] 9359 if v_0_1.Op != OpMIPS64MOVVconst { 9360 break 9361 } 9362 c := v_0_1.AuxInt 9363 if !(isPowerOfTwo(c)) { 9364 break 9365 } 9366 v.reset(OpMIPS64SLLVconst) 9367 v.AuxInt = log2(c) 9368 v.AddArg(x) 9369 return true 9370 } 9371 // match: (Select1 (DIVVU x (MOVVconst [1]))) 9372 // cond: 9373 // result: x 9374 for { 9375 v_0 := v.Args[0] 9376 if v_0.Op != OpMIPS64DIVVU { 9377 break 9378 } 9379 _ = v_0.Args[1] 9380 x := v_0.Args[0] 9381 v_0_1 := v_0.Args[1] 9382 if v_0_1.Op != OpMIPS64MOVVconst { 9383 break 9384 } 9385 if v_0_1.AuxInt != 1 { 9386 break 9387 } 9388 v.reset(OpCopy) 9389 v.Type = x.Type 9390 v.AddArg(x) 9391 return true 9392 } 9393 // match: (Select1 (DIVVU x (MOVVconst [c]))) 9394 // cond: isPowerOfTwo(c) 9395 // result: (SRLVconst [log2(c)] x) 9396 for { 9397 v_0 := v.Args[0] 9398 if v_0.Op != OpMIPS64DIVVU { 9399 break 9400 } 9401 _ = v_0.Args[1] 9402 x := v_0.Args[0] 9403 v_0_1 := v_0.Args[1] 9404 if v_0_1.Op != OpMIPS64MOVVconst { 9405 break 9406 } 9407 c := v_0_1.AuxInt 9408 if !(isPowerOfTwo(c)) { 9409 break 9410 } 9411 v.reset(OpMIPS64SRLVconst) 9412 v.AuxInt = log2(c) 9413 v.AddArg(x) 9414 return true 9415 } 9416 // match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d]))) 9417 // cond: 9418 // result: (MOVVconst [c*d]) 9419 for { 9420 v_0 := v.Args[0] 9421 if v_0.Op != OpMIPS64MULVU { 9422 break 9423 } 9424 _ = v_0.Args[1] 9425 v_0_0 := v_0.Args[0] 9426 if v_0_0.Op != OpMIPS64MOVVconst { 9427 break 9428 } 9429 c := v_0_0.AuxInt 9430 v_0_1 := v_0.Args[1] 9431 if v_0_1.Op != OpMIPS64MOVVconst { 9432 break 9433 } 9434 d := v_0_1.AuxInt 9435 v.reset(OpMIPS64MOVVconst) 9436 v.AuxInt = c * d 9437 return true 9438 } 9439 // match: (Select1 (MULVU (MOVVconst [d]) (MOVVconst [c]))) 9440 // cond: 9441 // result: (MOVVconst [c*d]) 9442 for { 9443 v_0 := v.Args[0] 9444 if v_0.Op != OpMIPS64MULVU { 9445 break 9446 } 9447 _ = v_0.Args[1] 9448 v_0_0 := v_0.Args[0] 9449 if v_0_0.Op != OpMIPS64MOVVconst { 9450 break 9451 } 9452 d := v_0_0.AuxInt 9453 v_0_1 := v_0.Args[1] 9454 if v_0_1.Op != OpMIPS64MOVVconst { 9455 break 9456 } 9457 c := v_0_1.AuxInt 9458 v.reset(OpMIPS64MOVVconst) 9459 v.AuxInt = c * d 9460 return true 9461 } 9462 return false 9463 } 9464 func rewriteValueMIPS64_OpSelect1_20(v *Value) bool { 9465 // match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 9466 // cond: 9467 // result: (MOVVconst [c/d]) 9468 for { 9469 v_0 := v.Args[0] 9470 if v_0.Op != OpMIPS64DIVV { 9471 break 9472 } 9473 _ = v_0.Args[1] 9474 v_0_0 := v_0.Args[0] 9475 if v_0_0.Op != OpMIPS64MOVVconst { 9476 break 9477 } 9478 c := v_0_0.AuxInt 9479 v_0_1 := v_0.Args[1] 9480 if v_0_1.Op != OpMIPS64MOVVconst { 9481 break 9482 } 9483 d := v_0_1.AuxInt 9484 v.reset(OpMIPS64MOVVconst) 9485 v.AuxInt = c / d 9486 return true 9487 } 9488 // match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 9489 // cond: 9490 // result: (MOVVconst [int64(uint64(c)/uint64(d))]) 9491 for { 9492 v_0 := v.Args[0] 9493 if v_0.Op != OpMIPS64DIVVU { 9494 break 9495 } 9496 _ = v_0.Args[1] 9497 v_0_0 := v_0.Args[0] 9498 if v_0_0.Op != OpMIPS64MOVVconst { 9499 break 9500 } 9501 c := v_0_0.AuxInt 9502 v_0_1 := v_0.Args[1] 9503 if v_0_1.Op != OpMIPS64MOVVconst { 9504 break 9505 } 9506 d := v_0_1.AuxInt 9507 v.reset(OpMIPS64MOVVconst) 9508 v.AuxInt = int64(uint64(c) / uint64(d)) 9509 return true 9510 } 9511 return false 9512 } 9513 func rewriteValueMIPS64_OpSignExt16to32_0(v *Value) bool { 9514 // match: (SignExt16to32 x) 9515 // cond: 9516 // result: (MOVHreg x) 9517 for { 9518 x := v.Args[0] 9519 v.reset(OpMIPS64MOVHreg) 9520 v.AddArg(x) 9521 return true 9522 } 9523 } 9524 func rewriteValueMIPS64_OpSignExt16to64_0(v *Value) bool { 9525 // match: (SignExt16to64 x) 9526 // cond: 9527 // result: (MOVHreg x) 9528 for { 9529 x := v.Args[0] 9530 v.reset(OpMIPS64MOVHreg) 9531 v.AddArg(x) 9532 return true 9533 } 9534 } 9535 func rewriteValueMIPS64_OpSignExt32to64_0(v *Value) bool { 9536 // match: (SignExt32to64 x) 9537 // cond: 9538 // result: (MOVWreg x) 9539 for { 9540 x := v.Args[0] 9541 v.reset(OpMIPS64MOVWreg) 9542 v.AddArg(x) 9543 return true 9544 } 9545 } 9546 func rewriteValueMIPS64_OpSignExt8to16_0(v *Value) bool { 9547 // match: (SignExt8to16 x) 9548 // cond: 9549 // result: (MOVBreg x) 9550 for { 9551 x := v.Args[0] 9552 v.reset(OpMIPS64MOVBreg) 9553 v.AddArg(x) 9554 return true 9555 } 9556 } 9557 func rewriteValueMIPS64_OpSignExt8to32_0(v *Value) bool { 9558 // match: (SignExt8to32 x) 9559 // cond: 9560 // result: (MOVBreg x) 9561 for { 9562 x := v.Args[0] 9563 v.reset(OpMIPS64MOVBreg) 9564 v.AddArg(x) 9565 return true 9566 } 9567 } 9568 func rewriteValueMIPS64_OpSignExt8to64_0(v *Value) bool { 9569 // match: (SignExt8to64 x) 9570 // cond: 9571 // result: (MOVBreg x) 9572 for { 9573 x := v.Args[0] 9574 v.reset(OpMIPS64MOVBreg) 9575 v.AddArg(x) 9576 return true 9577 } 9578 } 9579 func rewriteValueMIPS64_OpSlicemask_0(v *Value) bool { 9580 b := v.Block 9581 _ = b 9582 // match: (Slicemask <t> x) 9583 // cond: 9584 // result: (SRAVconst (NEGV <t> x) [63]) 9585 for { 9586 t := v.Type 9587 x := v.Args[0] 9588 v.reset(OpMIPS64SRAVconst) 9589 v.AuxInt = 63 9590 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t) 9591 v0.AddArg(x) 9592 v.AddArg(v0) 9593 return true 9594 } 9595 } 9596 func rewriteValueMIPS64_OpSqrt_0(v *Value) bool { 9597 // match: (Sqrt x) 9598 // cond: 9599 // result: (SQRTD x) 9600 for { 9601 x := v.Args[0] 9602 v.reset(OpMIPS64SQRTD) 9603 v.AddArg(x) 9604 return true 9605 } 9606 } 9607 func rewriteValueMIPS64_OpStaticCall_0(v *Value) bool { 9608 // match: (StaticCall [argwid] {target} mem) 9609 // cond: 9610 // result: (CALLstatic [argwid] {target} mem) 9611 for { 9612 argwid := v.AuxInt 9613 target := v.Aux 9614 mem := v.Args[0] 9615 v.reset(OpMIPS64CALLstatic) 9616 v.AuxInt = argwid 9617 v.Aux = target 9618 v.AddArg(mem) 9619 return true 9620 } 9621 } 9622 func rewriteValueMIPS64_OpStore_0(v *Value) bool { 9623 // match: (Store {t} ptr val mem) 9624 // cond: t.(*types.Type).Size() == 1 9625 // result: (MOVBstore ptr val mem) 9626 for { 9627 t := v.Aux 9628 _ = v.Args[2] 9629 ptr := v.Args[0] 9630 val := v.Args[1] 9631 mem := v.Args[2] 9632 if !(t.(*types.Type).Size() == 1) { 9633 break 9634 } 9635 v.reset(OpMIPS64MOVBstore) 9636 v.AddArg(ptr) 9637 v.AddArg(val) 9638 v.AddArg(mem) 9639 return true 9640 } 9641 // match: (Store {t} ptr val mem) 9642 // cond: t.(*types.Type).Size() == 2 9643 // result: (MOVHstore ptr val mem) 9644 for { 9645 t := v.Aux 9646 _ = v.Args[2] 9647 ptr := v.Args[0] 9648 val := v.Args[1] 9649 mem := v.Args[2] 9650 if !(t.(*types.Type).Size() == 2) { 9651 break 9652 } 9653 v.reset(OpMIPS64MOVHstore) 9654 v.AddArg(ptr) 9655 v.AddArg(val) 9656 v.AddArg(mem) 9657 return true 9658 } 9659 // match: (Store {t} ptr val mem) 9660 // cond: t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type) 9661 // result: (MOVWstore ptr val mem) 9662 for { 9663 t := v.Aux 9664 _ = v.Args[2] 9665 ptr := v.Args[0] 9666 val := v.Args[1] 9667 mem := v.Args[2] 9668 if !(t.(*types.Type).Size() == 4 && !is32BitFloat(val.Type)) { 9669 break 9670 } 9671 v.reset(OpMIPS64MOVWstore) 9672 v.AddArg(ptr) 9673 v.AddArg(val) 9674 v.AddArg(mem) 9675 return true 9676 } 9677 // match: (Store {t} ptr val mem) 9678 // cond: t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type) 9679 // result: (MOVVstore ptr val mem) 9680 for { 9681 t := v.Aux 9682 _ = v.Args[2] 9683 ptr := v.Args[0] 9684 val := v.Args[1] 9685 mem := v.Args[2] 9686 if !(t.(*types.Type).Size() == 8 && !is64BitFloat(val.Type)) { 9687 break 9688 } 9689 v.reset(OpMIPS64MOVVstore) 9690 v.AddArg(ptr) 9691 v.AddArg(val) 9692 v.AddArg(mem) 9693 return true 9694 } 9695 // match: (Store {t} ptr val mem) 9696 // cond: t.(*types.Type).Size() == 4 && is32BitFloat(val.Type) 9697 // result: (MOVFstore ptr val mem) 9698 for { 9699 t := v.Aux 9700 _ = v.Args[2] 9701 ptr := v.Args[0] 9702 val := v.Args[1] 9703 mem := v.Args[2] 9704 if !(t.(*types.Type).Size() == 4 && is32BitFloat(val.Type)) { 9705 break 9706 } 9707 v.reset(OpMIPS64MOVFstore) 9708 v.AddArg(ptr) 9709 v.AddArg(val) 9710 v.AddArg(mem) 9711 return true 9712 } 9713 // match: (Store {t} ptr val mem) 9714 // cond: t.(*types.Type).Size() == 8 && is64BitFloat(val.Type) 9715 // result: (MOVDstore ptr val mem) 9716 for { 9717 t := v.Aux 9718 _ = v.Args[2] 9719 ptr := v.Args[0] 9720 val := v.Args[1] 9721 mem := v.Args[2] 9722 if !(t.(*types.Type).Size() == 8 && is64BitFloat(val.Type)) { 9723 break 9724 } 9725 v.reset(OpMIPS64MOVDstore) 9726 v.AddArg(ptr) 9727 v.AddArg(val) 9728 v.AddArg(mem) 9729 return true 9730 } 9731 return false 9732 } 9733 func rewriteValueMIPS64_OpSub16_0(v *Value) bool { 9734 // match: (Sub16 x y) 9735 // cond: 9736 // result: (SUBV x y) 9737 for { 9738 _ = v.Args[1] 9739 x := v.Args[0] 9740 y := v.Args[1] 9741 v.reset(OpMIPS64SUBV) 9742 v.AddArg(x) 9743 v.AddArg(y) 9744 return true 9745 } 9746 } 9747 func rewriteValueMIPS64_OpSub32_0(v *Value) bool { 9748 // match: (Sub32 x y) 9749 // cond: 9750 // result: (SUBV x y) 9751 for { 9752 _ = v.Args[1] 9753 x := v.Args[0] 9754 y := v.Args[1] 9755 v.reset(OpMIPS64SUBV) 9756 v.AddArg(x) 9757 v.AddArg(y) 9758 return true 9759 } 9760 } 9761 func rewriteValueMIPS64_OpSub32F_0(v *Value) bool { 9762 // match: (Sub32F x y) 9763 // cond: 9764 // result: (SUBF x y) 9765 for { 9766 _ = v.Args[1] 9767 x := v.Args[0] 9768 y := v.Args[1] 9769 v.reset(OpMIPS64SUBF) 9770 v.AddArg(x) 9771 v.AddArg(y) 9772 return true 9773 } 9774 } 9775 func rewriteValueMIPS64_OpSub64_0(v *Value) bool { 9776 // match: (Sub64 x y) 9777 // cond: 9778 // result: (SUBV x y) 9779 for { 9780 _ = v.Args[1] 9781 x := v.Args[0] 9782 y := v.Args[1] 9783 v.reset(OpMIPS64SUBV) 9784 v.AddArg(x) 9785 v.AddArg(y) 9786 return true 9787 } 9788 } 9789 func rewriteValueMIPS64_OpSub64F_0(v *Value) bool { 9790 // match: (Sub64F x y) 9791 // cond: 9792 // result: (SUBD x y) 9793 for { 9794 _ = v.Args[1] 9795 x := v.Args[0] 9796 y := v.Args[1] 9797 v.reset(OpMIPS64SUBD) 9798 v.AddArg(x) 9799 v.AddArg(y) 9800 return true 9801 } 9802 } 9803 func rewriteValueMIPS64_OpSub8_0(v *Value) bool { 9804 // match: (Sub8 x y) 9805 // cond: 9806 // result: (SUBV x y) 9807 for { 9808 _ = v.Args[1] 9809 x := v.Args[0] 9810 y := v.Args[1] 9811 v.reset(OpMIPS64SUBV) 9812 v.AddArg(x) 9813 v.AddArg(y) 9814 return true 9815 } 9816 } 9817 func rewriteValueMIPS64_OpSubPtr_0(v *Value) bool { 9818 // match: (SubPtr x y) 9819 // cond: 9820 // result: (SUBV x y) 9821 for { 9822 _ = v.Args[1] 9823 x := v.Args[0] 9824 y := v.Args[1] 9825 v.reset(OpMIPS64SUBV) 9826 v.AddArg(x) 9827 v.AddArg(y) 9828 return true 9829 } 9830 } 9831 func rewriteValueMIPS64_OpTrunc16to8_0(v *Value) bool { 9832 // match: (Trunc16to8 x) 9833 // cond: 9834 // result: x 9835 for { 9836 x := v.Args[0] 9837 v.reset(OpCopy) 9838 v.Type = x.Type 9839 v.AddArg(x) 9840 return true 9841 } 9842 } 9843 func rewriteValueMIPS64_OpTrunc32to16_0(v *Value) bool { 9844 // match: (Trunc32to16 x) 9845 // cond: 9846 // result: x 9847 for { 9848 x := v.Args[0] 9849 v.reset(OpCopy) 9850 v.Type = x.Type 9851 v.AddArg(x) 9852 return true 9853 } 9854 } 9855 func rewriteValueMIPS64_OpTrunc32to8_0(v *Value) bool { 9856 // match: (Trunc32to8 x) 9857 // cond: 9858 // result: x 9859 for { 9860 x := v.Args[0] 9861 v.reset(OpCopy) 9862 v.Type = x.Type 9863 v.AddArg(x) 9864 return true 9865 } 9866 } 9867 func rewriteValueMIPS64_OpTrunc64to16_0(v *Value) bool { 9868 // match: (Trunc64to16 x) 9869 // cond: 9870 // result: x 9871 for { 9872 x := v.Args[0] 9873 v.reset(OpCopy) 9874 v.Type = x.Type 9875 v.AddArg(x) 9876 return true 9877 } 9878 } 9879 func rewriteValueMIPS64_OpTrunc64to32_0(v *Value) bool { 9880 // match: (Trunc64to32 x) 9881 // cond: 9882 // result: x 9883 for { 9884 x := v.Args[0] 9885 v.reset(OpCopy) 9886 v.Type = x.Type 9887 v.AddArg(x) 9888 return true 9889 } 9890 } 9891 func rewriteValueMIPS64_OpTrunc64to8_0(v *Value) bool { 9892 // match: (Trunc64to8 x) 9893 // cond: 9894 // result: x 9895 for { 9896 x := v.Args[0] 9897 v.reset(OpCopy) 9898 v.Type = x.Type 9899 v.AddArg(x) 9900 return true 9901 } 9902 } 9903 func rewriteValueMIPS64_OpWB_0(v *Value) bool { 9904 // match: (WB {fn} destptr srcptr mem) 9905 // cond: 9906 // result: (LoweredWB {fn} destptr srcptr mem) 9907 for { 9908 fn := v.Aux 9909 _ = v.Args[2] 9910 destptr := v.Args[0] 9911 srcptr := v.Args[1] 9912 mem := v.Args[2] 9913 v.reset(OpMIPS64LoweredWB) 9914 v.Aux = fn 9915 v.AddArg(destptr) 9916 v.AddArg(srcptr) 9917 v.AddArg(mem) 9918 return true 9919 } 9920 } 9921 func rewriteValueMIPS64_OpXor16_0(v *Value) bool { 9922 // match: (Xor16 x y) 9923 // cond: 9924 // result: (XOR x y) 9925 for { 9926 _ = v.Args[1] 9927 x := v.Args[0] 9928 y := v.Args[1] 9929 v.reset(OpMIPS64XOR) 9930 v.AddArg(x) 9931 v.AddArg(y) 9932 return true 9933 } 9934 } 9935 func rewriteValueMIPS64_OpXor32_0(v *Value) bool { 9936 // match: (Xor32 x y) 9937 // cond: 9938 // result: (XOR x y) 9939 for { 9940 _ = v.Args[1] 9941 x := v.Args[0] 9942 y := v.Args[1] 9943 v.reset(OpMIPS64XOR) 9944 v.AddArg(x) 9945 v.AddArg(y) 9946 return true 9947 } 9948 } 9949 func rewriteValueMIPS64_OpXor64_0(v *Value) bool { 9950 // match: (Xor64 x y) 9951 // cond: 9952 // result: (XOR x y) 9953 for { 9954 _ = v.Args[1] 9955 x := v.Args[0] 9956 y := v.Args[1] 9957 v.reset(OpMIPS64XOR) 9958 v.AddArg(x) 9959 v.AddArg(y) 9960 return true 9961 } 9962 } 9963 func rewriteValueMIPS64_OpXor8_0(v *Value) bool { 9964 // match: (Xor8 x y) 9965 // cond: 9966 // result: (XOR x y) 9967 for { 9968 _ = v.Args[1] 9969 x := v.Args[0] 9970 y := v.Args[1] 9971 v.reset(OpMIPS64XOR) 9972 v.AddArg(x) 9973 v.AddArg(y) 9974 return true 9975 } 9976 } 9977 func rewriteValueMIPS64_OpZero_0(v *Value) bool { 9978 b := v.Block 9979 _ = b 9980 typ := &b.Func.Config.Types 9981 _ = typ 9982 // match: (Zero [0] _ mem) 9983 // cond: 9984 // result: mem 9985 for { 9986 if v.AuxInt != 0 { 9987 break 9988 } 9989 _ = v.Args[1] 9990 mem := v.Args[1] 9991 v.reset(OpCopy) 9992 v.Type = mem.Type 9993 v.AddArg(mem) 9994 return true 9995 } 9996 // match: (Zero [1] ptr mem) 9997 // cond: 9998 // result: (MOVBstore ptr (MOVVconst [0]) mem) 9999 for { 10000 if v.AuxInt != 1 { 10001 break 10002 } 10003 _ = v.Args[1] 10004 ptr := v.Args[0] 10005 mem := v.Args[1] 10006 v.reset(OpMIPS64MOVBstore) 10007 v.AddArg(ptr) 10008 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10009 v0.AuxInt = 0 10010 v.AddArg(v0) 10011 v.AddArg(mem) 10012 return true 10013 } 10014 // match: (Zero [2] {t} ptr mem) 10015 // cond: t.(*types.Type).Alignment()%2 == 0 10016 // result: (MOVHstore ptr (MOVVconst [0]) mem) 10017 for { 10018 if v.AuxInt != 2 { 10019 break 10020 } 10021 t := v.Aux 10022 _ = v.Args[1] 10023 ptr := v.Args[0] 10024 mem := v.Args[1] 10025 if !(t.(*types.Type).Alignment()%2 == 0) { 10026 break 10027 } 10028 v.reset(OpMIPS64MOVHstore) 10029 v.AddArg(ptr) 10030 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10031 v0.AuxInt = 0 10032 v.AddArg(v0) 10033 v.AddArg(mem) 10034 return true 10035 } 10036 // match: (Zero [2] ptr mem) 10037 // cond: 10038 // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)) 10039 for { 10040 if v.AuxInt != 2 { 10041 break 10042 } 10043 _ = v.Args[1] 10044 ptr := v.Args[0] 10045 mem := v.Args[1] 10046 v.reset(OpMIPS64MOVBstore) 10047 v.AuxInt = 1 10048 v.AddArg(ptr) 10049 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10050 v0.AuxInt = 0 10051 v.AddArg(v0) 10052 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10053 v1.AuxInt = 0 10054 v1.AddArg(ptr) 10055 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10056 v2.AuxInt = 0 10057 v1.AddArg(v2) 10058 v1.AddArg(mem) 10059 v.AddArg(v1) 10060 return true 10061 } 10062 // match: (Zero [4] {t} ptr mem) 10063 // cond: t.(*types.Type).Alignment()%4 == 0 10064 // result: (MOVWstore ptr (MOVVconst [0]) mem) 10065 for { 10066 if v.AuxInt != 4 { 10067 break 10068 } 10069 t := v.Aux 10070 _ = v.Args[1] 10071 ptr := v.Args[0] 10072 mem := v.Args[1] 10073 if !(t.(*types.Type).Alignment()%4 == 0) { 10074 break 10075 } 10076 v.reset(OpMIPS64MOVWstore) 10077 v.AddArg(ptr) 10078 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10079 v0.AuxInt = 0 10080 v.AddArg(v0) 10081 v.AddArg(mem) 10082 return true 10083 } 10084 // match: (Zero [4] {t} ptr mem) 10085 // cond: t.(*types.Type).Alignment()%2 == 0 10086 // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)) 10087 for { 10088 if v.AuxInt != 4 { 10089 break 10090 } 10091 t := v.Aux 10092 _ = v.Args[1] 10093 ptr := v.Args[0] 10094 mem := v.Args[1] 10095 if !(t.(*types.Type).Alignment()%2 == 0) { 10096 break 10097 } 10098 v.reset(OpMIPS64MOVHstore) 10099 v.AuxInt = 2 10100 v.AddArg(ptr) 10101 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10102 v0.AuxInt = 0 10103 v.AddArg(v0) 10104 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10105 v1.AuxInt = 0 10106 v1.AddArg(ptr) 10107 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10108 v2.AuxInt = 0 10109 v1.AddArg(v2) 10110 v1.AddArg(mem) 10111 v.AddArg(v1) 10112 return true 10113 } 10114 // match: (Zero [4] ptr mem) 10115 // cond: 10116 // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))) 10117 for { 10118 if v.AuxInt != 4 { 10119 break 10120 } 10121 _ = v.Args[1] 10122 ptr := v.Args[0] 10123 mem := v.Args[1] 10124 v.reset(OpMIPS64MOVBstore) 10125 v.AuxInt = 3 10126 v.AddArg(ptr) 10127 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10128 v0.AuxInt = 0 10129 v.AddArg(v0) 10130 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10131 v1.AuxInt = 2 10132 v1.AddArg(ptr) 10133 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10134 v2.AuxInt = 0 10135 v1.AddArg(v2) 10136 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10137 v3.AuxInt = 1 10138 v3.AddArg(ptr) 10139 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10140 v4.AuxInt = 0 10141 v3.AddArg(v4) 10142 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10143 v5.AuxInt = 0 10144 v5.AddArg(ptr) 10145 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10146 v6.AuxInt = 0 10147 v5.AddArg(v6) 10148 v5.AddArg(mem) 10149 v3.AddArg(v5) 10150 v1.AddArg(v3) 10151 v.AddArg(v1) 10152 return true 10153 } 10154 // match: (Zero [8] {t} ptr mem) 10155 // cond: t.(*types.Type).Alignment()%8 == 0 10156 // result: (MOVVstore ptr (MOVVconst [0]) mem) 10157 for { 10158 if v.AuxInt != 8 { 10159 break 10160 } 10161 t := v.Aux 10162 _ = v.Args[1] 10163 ptr := v.Args[0] 10164 mem := v.Args[1] 10165 if !(t.(*types.Type).Alignment()%8 == 0) { 10166 break 10167 } 10168 v.reset(OpMIPS64MOVVstore) 10169 v.AddArg(ptr) 10170 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10171 v0.AuxInt = 0 10172 v.AddArg(v0) 10173 v.AddArg(mem) 10174 return true 10175 } 10176 // match: (Zero [8] {t} ptr mem) 10177 // cond: t.(*types.Type).Alignment()%4 == 0 10178 // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)) 10179 for { 10180 if v.AuxInt != 8 { 10181 break 10182 } 10183 t := v.Aux 10184 _ = v.Args[1] 10185 ptr := v.Args[0] 10186 mem := v.Args[1] 10187 if !(t.(*types.Type).Alignment()%4 == 0) { 10188 break 10189 } 10190 v.reset(OpMIPS64MOVWstore) 10191 v.AuxInt = 4 10192 v.AddArg(ptr) 10193 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10194 v0.AuxInt = 0 10195 v.AddArg(v0) 10196 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10197 v1.AuxInt = 0 10198 v1.AddArg(ptr) 10199 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10200 v2.AuxInt = 0 10201 v1.AddArg(v2) 10202 v1.AddArg(mem) 10203 v.AddArg(v1) 10204 return true 10205 } 10206 // match: (Zero [8] {t} ptr mem) 10207 // cond: t.(*types.Type).Alignment()%2 == 0 10208 // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))) 10209 for { 10210 if v.AuxInt != 8 { 10211 break 10212 } 10213 t := v.Aux 10214 _ = v.Args[1] 10215 ptr := v.Args[0] 10216 mem := v.Args[1] 10217 if !(t.(*types.Type).Alignment()%2 == 0) { 10218 break 10219 } 10220 v.reset(OpMIPS64MOVHstore) 10221 v.AuxInt = 6 10222 v.AddArg(ptr) 10223 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10224 v0.AuxInt = 0 10225 v.AddArg(v0) 10226 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10227 v1.AuxInt = 4 10228 v1.AddArg(ptr) 10229 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10230 v2.AuxInt = 0 10231 v1.AddArg(v2) 10232 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10233 v3.AuxInt = 2 10234 v3.AddArg(ptr) 10235 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10236 v4.AuxInt = 0 10237 v3.AddArg(v4) 10238 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10239 v5.AuxInt = 0 10240 v5.AddArg(ptr) 10241 v6 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10242 v6.AuxInt = 0 10243 v5.AddArg(v6) 10244 v5.AddArg(mem) 10245 v3.AddArg(v5) 10246 v1.AddArg(v3) 10247 v.AddArg(v1) 10248 return true 10249 } 10250 return false 10251 } 10252 func rewriteValueMIPS64_OpZero_10(v *Value) bool { 10253 b := v.Block 10254 _ = b 10255 config := b.Func.Config 10256 _ = config 10257 typ := &b.Func.Config.Types 10258 _ = typ 10259 // match: (Zero [3] ptr mem) 10260 // cond: 10261 // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))) 10262 for { 10263 if v.AuxInt != 3 { 10264 break 10265 } 10266 _ = v.Args[1] 10267 ptr := v.Args[0] 10268 mem := v.Args[1] 10269 v.reset(OpMIPS64MOVBstore) 10270 v.AuxInt = 2 10271 v.AddArg(ptr) 10272 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10273 v0.AuxInt = 0 10274 v.AddArg(v0) 10275 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10276 v1.AuxInt = 1 10277 v1.AddArg(ptr) 10278 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10279 v2.AuxInt = 0 10280 v1.AddArg(v2) 10281 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem) 10282 v3.AuxInt = 0 10283 v3.AddArg(ptr) 10284 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10285 v4.AuxInt = 0 10286 v3.AddArg(v4) 10287 v3.AddArg(mem) 10288 v1.AddArg(v3) 10289 v.AddArg(v1) 10290 return true 10291 } 10292 // match: (Zero [6] {t} ptr mem) 10293 // cond: t.(*types.Type).Alignment()%2 == 0 10294 // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))) 10295 for { 10296 if v.AuxInt != 6 { 10297 break 10298 } 10299 t := v.Aux 10300 _ = v.Args[1] 10301 ptr := v.Args[0] 10302 mem := v.Args[1] 10303 if !(t.(*types.Type).Alignment()%2 == 0) { 10304 break 10305 } 10306 v.reset(OpMIPS64MOVHstore) 10307 v.AuxInt = 4 10308 v.AddArg(ptr) 10309 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10310 v0.AuxInt = 0 10311 v.AddArg(v0) 10312 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10313 v1.AuxInt = 2 10314 v1.AddArg(ptr) 10315 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10316 v2.AuxInt = 0 10317 v1.AddArg(v2) 10318 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem) 10319 v3.AuxInt = 0 10320 v3.AddArg(ptr) 10321 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10322 v4.AuxInt = 0 10323 v3.AddArg(v4) 10324 v3.AddArg(mem) 10325 v1.AddArg(v3) 10326 v.AddArg(v1) 10327 return true 10328 } 10329 // match: (Zero [12] {t} ptr mem) 10330 // cond: t.(*types.Type).Alignment()%4 == 0 10331 // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))) 10332 for { 10333 if v.AuxInt != 12 { 10334 break 10335 } 10336 t := v.Aux 10337 _ = v.Args[1] 10338 ptr := v.Args[0] 10339 mem := v.Args[1] 10340 if !(t.(*types.Type).Alignment()%4 == 0) { 10341 break 10342 } 10343 v.reset(OpMIPS64MOVWstore) 10344 v.AuxInt = 8 10345 v.AddArg(ptr) 10346 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10347 v0.AuxInt = 0 10348 v.AddArg(v0) 10349 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10350 v1.AuxInt = 4 10351 v1.AddArg(ptr) 10352 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10353 v2.AuxInt = 0 10354 v1.AddArg(v2) 10355 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem) 10356 v3.AuxInt = 0 10357 v3.AddArg(ptr) 10358 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10359 v4.AuxInt = 0 10360 v3.AddArg(v4) 10361 v3.AddArg(mem) 10362 v1.AddArg(v3) 10363 v.AddArg(v1) 10364 return true 10365 } 10366 // match: (Zero [16] {t} ptr mem) 10367 // cond: t.(*types.Type).Alignment()%8 == 0 10368 // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)) 10369 for { 10370 if v.AuxInt != 16 { 10371 break 10372 } 10373 t := v.Aux 10374 _ = v.Args[1] 10375 ptr := v.Args[0] 10376 mem := v.Args[1] 10377 if !(t.(*types.Type).Alignment()%8 == 0) { 10378 break 10379 } 10380 v.reset(OpMIPS64MOVVstore) 10381 v.AuxInt = 8 10382 v.AddArg(ptr) 10383 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10384 v0.AuxInt = 0 10385 v.AddArg(v0) 10386 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10387 v1.AuxInt = 0 10388 v1.AddArg(ptr) 10389 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10390 v2.AuxInt = 0 10391 v1.AddArg(v2) 10392 v1.AddArg(mem) 10393 v.AddArg(v1) 10394 return true 10395 } 10396 // match: (Zero [24] {t} ptr mem) 10397 // cond: t.(*types.Type).Alignment()%8 == 0 10398 // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))) 10399 for { 10400 if v.AuxInt != 24 { 10401 break 10402 } 10403 t := v.Aux 10404 _ = v.Args[1] 10405 ptr := v.Args[0] 10406 mem := v.Args[1] 10407 if !(t.(*types.Type).Alignment()%8 == 0) { 10408 break 10409 } 10410 v.reset(OpMIPS64MOVVstore) 10411 v.AuxInt = 16 10412 v.AddArg(ptr) 10413 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10414 v0.AuxInt = 0 10415 v.AddArg(v0) 10416 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10417 v1.AuxInt = 8 10418 v1.AddArg(ptr) 10419 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10420 v2.AuxInt = 0 10421 v1.AddArg(v2) 10422 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem) 10423 v3.AuxInt = 0 10424 v3.AddArg(ptr) 10425 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64) 10426 v4.AuxInt = 0 10427 v3.AddArg(v4) 10428 v3.AddArg(mem) 10429 v1.AddArg(v3) 10430 v.AddArg(v1) 10431 return true 10432 } 10433 // match: (Zero [s] {t} ptr mem) 10434 // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice 10435 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem) 10436 for { 10437 s := v.AuxInt 10438 t := v.Aux 10439 _ = v.Args[1] 10440 ptr := v.Args[0] 10441 mem := v.Args[1] 10442 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.(*types.Type).Alignment()%8 == 0 && !config.noDuffDevice) { 10443 break 10444 } 10445 v.reset(OpMIPS64DUFFZERO) 10446 v.AuxInt = 8 * (128 - s/8) 10447 v.AddArg(ptr) 10448 v.AddArg(mem) 10449 return true 10450 } 10451 // match: (Zero [s] {t} ptr mem) 10452 // cond: (s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0 10453 // result: (LoweredZero [t.(*types.Type).Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.(*types.Type).Alignment(), config)]) mem) 10454 for { 10455 s := v.AuxInt 10456 t := v.Aux 10457 _ = v.Args[1] 10458 ptr := v.Args[0] 10459 mem := v.Args[1] 10460 if !((s > 8*128 || config.noDuffDevice) || t.(*types.Type).Alignment()%8 != 0) { 10461 break 10462 } 10463 v.reset(OpMIPS64LoweredZero) 10464 v.AuxInt = t.(*types.Type).Alignment() 10465 v.AddArg(ptr) 10466 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type) 10467 v0.AuxInt = s - moveSize(t.(*types.Type).Alignment(), config) 10468 v0.AddArg(ptr) 10469 v.AddArg(v0) 10470 v.AddArg(mem) 10471 return true 10472 } 10473 return false 10474 } 10475 func rewriteValueMIPS64_OpZeroExt16to32_0(v *Value) bool { 10476 // match: (ZeroExt16to32 x) 10477 // cond: 10478 // result: (MOVHUreg x) 10479 for { 10480 x := v.Args[0] 10481 v.reset(OpMIPS64MOVHUreg) 10482 v.AddArg(x) 10483 return true 10484 } 10485 } 10486 func rewriteValueMIPS64_OpZeroExt16to64_0(v *Value) bool { 10487 // match: (ZeroExt16to64 x) 10488 // cond: 10489 // result: (MOVHUreg x) 10490 for { 10491 x := v.Args[0] 10492 v.reset(OpMIPS64MOVHUreg) 10493 v.AddArg(x) 10494 return true 10495 } 10496 } 10497 func rewriteValueMIPS64_OpZeroExt32to64_0(v *Value) bool { 10498 // match: (ZeroExt32to64 x) 10499 // cond: 10500 // result: (MOVWUreg x) 10501 for { 10502 x := v.Args[0] 10503 v.reset(OpMIPS64MOVWUreg) 10504 v.AddArg(x) 10505 return true 10506 } 10507 } 10508 func rewriteValueMIPS64_OpZeroExt8to16_0(v *Value) bool { 10509 // match: (ZeroExt8to16 x) 10510 // cond: 10511 // result: (MOVBUreg x) 10512 for { 10513 x := v.Args[0] 10514 v.reset(OpMIPS64MOVBUreg) 10515 v.AddArg(x) 10516 return true 10517 } 10518 } 10519 func rewriteValueMIPS64_OpZeroExt8to32_0(v *Value) bool { 10520 // match: (ZeroExt8to32 x) 10521 // cond: 10522 // result: (MOVBUreg x) 10523 for { 10524 x := v.Args[0] 10525 v.reset(OpMIPS64MOVBUreg) 10526 v.AddArg(x) 10527 return true 10528 } 10529 } 10530 func rewriteValueMIPS64_OpZeroExt8to64_0(v *Value) bool { 10531 // match: (ZeroExt8to64 x) 10532 // cond: 10533 // result: (MOVBUreg x) 10534 for { 10535 x := v.Args[0] 10536 v.reset(OpMIPS64MOVBUreg) 10537 v.AddArg(x) 10538 return true 10539 } 10540 } 10541 func rewriteBlockMIPS64(b *Block) bool { 10542 config := b.Func.Config 10543 _ = config 10544 fe := b.Func.fe 10545 _ = fe 10546 typ := &config.Types 10547 _ = typ 10548 switch b.Kind { 10549 case BlockMIPS64EQ: 10550 // match: (EQ (FPFlagTrue cmp) yes no) 10551 // cond: 10552 // result: (FPF cmp yes no) 10553 for { 10554 v := b.Control 10555 if v.Op != OpMIPS64FPFlagTrue { 10556 break 10557 } 10558 cmp := v.Args[0] 10559 b.Kind = BlockMIPS64FPF 10560 b.SetControl(cmp) 10561 b.Aux = nil 10562 return true 10563 } 10564 // match: (EQ (FPFlagFalse cmp) yes no) 10565 // cond: 10566 // result: (FPT cmp yes no) 10567 for { 10568 v := b.Control 10569 if v.Op != OpMIPS64FPFlagFalse { 10570 break 10571 } 10572 cmp := v.Args[0] 10573 b.Kind = BlockMIPS64FPT 10574 b.SetControl(cmp) 10575 b.Aux = nil 10576 return true 10577 } 10578 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no) 10579 // cond: 10580 // result: (NE cmp yes no) 10581 for { 10582 v := b.Control 10583 if v.Op != OpMIPS64XORconst { 10584 break 10585 } 10586 if v.AuxInt != 1 { 10587 break 10588 } 10589 cmp := v.Args[0] 10590 if cmp.Op != OpMIPS64SGT { 10591 break 10592 } 10593 _ = cmp.Args[1] 10594 b.Kind = BlockMIPS64NE 10595 b.SetControl(cmp) 10596 b.Aux = nil 10597 return true 10598 } 10599 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no) 10600 // cond: 10601 // result: (NE cmp yes no) 10602 for { 10603 v := b.Control 10604 if v.Op != OpMIPS64XORconst { 10605 break 10606 } 10607 if v.AuxInt != 1 { 10608 break 10609 } 10610 cmp := v.Args[0] 10611 if cmp.Op != OpMIPS64SGTU { 10612 break 10613 } 10614 _ = cmp.Args[1] 10615 b.Kind = BlockMIPS64NE 10616 b.SetControl(cmp) 10617 b.Aux = nil 10618 return true 10619 } 10620 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no) 10621 // cond: 10622 // result: (NE cmp yes no) 10623 for { 10624 v := b.Control 10625 if v.Op != OpMIPS64XORconst { 10626 break 10627 } 10628 if v.AuxInt != 1 { 10629 break 10630 } 10631 cmp := v.Args[0] 10632 if cmp.Op != OpMIPS64SGTconst { 10633 break 10634 } 10635 b.Kind = BlockMIPS64NE 10636 b.SetControl(cmp) 10637 b.Aux = nil 10638 return true 10639 } 10640 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no) 10641 // cond: 10642 // result: (NE cmp yes no) 10643 for { 10644 v := b.Control 10645 if v.Op != OpMIPS64XORconst { 10646 break 10647 } 10648 if v.AuxInt != 1 { 10649 break 10650 } 10651 cmp := v.Args[0] 10652 if cmp.Op != OpMIPS64SGTUconst { 10653 break 10654 } 10655 b.Kind = BlockMIPS64NE 10656 b.SetControl(cmp) 10657 b.Aux = nil 10658 return true 10659 } 10660 // match: (EQ (SGTUconst [1] x) yes no) 10661 // cond: 10662 // result: (NE x yes no) 10663 for { 10664 v := b.Control 10665 if v.Op != OpMIPS64SGTUconst { 10666 break 10667 } 10668 if v.AuxInt != 1 { 10669 break 10670 } 10671 x := v.Args[0] 10672 b.Kind = BlockMIPS64NE 10673 b.SetControl(x) 10674 b.Aux = nil 10675 return true 10676 } 10677 // match: (EQ (SGTU x (MOVVconst [0])) yes no) 10678 // cond: 10679 // result: (EQ x yes no) 10680 for { 10681 v := b.Control 10682 if v.Op != OpMIPS64SGTU { 10683 break 10684 } 10685 _ = v.Args[1] 10686 x := v.Args[0] 10687 v_1 := v.Args[1] 10688 if v_1.Op != OpMIPS64MOVVconst { 10689 break 10690 } 10691 if v_1.AuxInt != 0 { 10692 break 10693 } 10694 b.Kind = BlockMIPS64EQ 10695 b.SetControl(x) 10696 b.Aux = nil 10697 return true 10698 } 10699 // match: (EQ (SGTconst [0] x) yes no) 10700 // cond: 10701 // result: (GEZ x yes no) 10702 for { 10703 v := b.Control 10704 if v.Op != OpMIPS64SGTconst { 10705 break 10706 } 10707 if v.AuxInt != 0 { 10708 break 10709 } 10710 x := v.Args[0] 10711 b.Kind = BlockMIPS64GEZ 10712 b.SetControl(x) 10713 b.Aux = nil 10714 return true 10715 } 10716 // match: (EQ (SGT x (MOVVconst [0])) yes no) 10717 // cond: 10718 // result: (LEZ x yes no) 10719 for { 10720 v := b.Control 10721 if v.Op != OpMIPS64SGT { 10722 break 10723 } 10724 _ = v.Args[1] 10725 x := v.Args[0] 10726 v_1 := v.Args[1] 10727 if v_1.Op != OpMIPS64MOVVconst { 10728 break 10729 } 10730 if v_1.AuxInt != 0 { 10731 break 10732 } 10733 b.Kind = BlockMIPS64LEZ 10734 b.SetControl(x) 10735 b.Aux = nil 10736 return true 10737 } 10738 // match: (EQ (MOVVconst [0]) yes no) 10739 // cond: 10740 // result: (First nil yes no) 10741 for { 10742 v := b.Control 10743 if v.Op != OpMIPS64MOVVconst { 10744 break 10745 } 10746 if v.AuxInt != 0 { 10747 break 10748 } 10749 b.Kind = BlockFirst 10750 b.SetControl(nil) 10751 b.Aux = nil 10752 return true 10753 } 10754 // match: (EQ (MOVVconst [c]) yes no) 10755 // cond: c != 0 10756 // result: (First nil no yes) 10757 for { 10758 v := b.Control 10759 if v.Op != OpMIPS64MOVVconst { 10760 break 10761 } 10762 c := v.AuxInt 10763 if !(c != 0) { 10764 break 10765 } 10766 b.Kind = BlockFirst 10767 b.SetControl(nil) 10768 b.Aux = nil 10769 b.swapSuccessors() 10770 return true 10771 } 10772 case BlockMIPS64GEZ: 10773 // match: (GEZ (MOVVconst [c]) yes no) 10774 // cond: c >= 0 10775 // result: (First nil yes no) 10776 for { 10777 v := b.Control 10778 if v.Op != OpMIPS64MOVVconst { 10779 break 10780 } 10781 c := v.AuxInt 10782 if !(c >= 0) { 10783 break 10784 } 10785 b.Kind = BlockFirst 10786 b.SetControl(nil) 10787 b.Aux = nil 10788 return true 10789 } 10790 // match: (GEZ (MOVVconst [c]) yes no) 10791 // cond: c < 0 10792 // result: (First nil no yes) 10793 for { 10794 v := b.Control 10795 if v.Op != OpMIPS64MOVVconst { 10796 break 10797 } 10798 c := v.AuxInt 10799 if !(c < 0) { 10800 break 10801 } 10802 b.Kind = BlockFirst 10803 b.SetControl(nil) 10804 b.Aux = nil 10805 b.swapSuccessors() 10806 return true 10807 } 10808 case BlockMIPS64GTZ: 10809 // match: (GTZ (MOVVconst [c]) yes no) 10810 // cond: c > 0 10811 // result: (First nil yes no) 10812 for { 10813 v := b.Control 10814 if v.Op != OpMIPS64MOVVconst { 10815 break 10816 } 10817 c := v.AuxInt 10818 if !(c > 0) { 10819 break 10820 } 10821 b.Kind = BlockFirst 10822 b.SetControl(nil) 10823 b.Aux = nil 10824 return true 10825 } 10826 // match: (GTZ (MOVVconst [c]) yes no) 10827 // cond: c <= 0 10828 // result: (First nil no yes) 10829 for { 10830 v := b.Control 10831 if v.Op != OpMIPS64MOVVconst { 10832 break 10833 } 10834 c := v.AuxInt 10835 if !(c <= 0) { 10836 break 10837 } 10838 b.Kind = BlockFirst 10839 b.SetControl(nil) 10840 b.Aux = nil 10841 b.swapSuccessors() 10842 return true 10843 } 10844 case BlockIf: 10845 // match: (If cond yes no) 10846 // cond: 10847 // result: (NE cond yes no) 10848 for { 10849 v := b.Control 10850 _ = v 10851 cond := b.Control 10852 b.Kind = BlockMIPS64NE 10853 b.SetControl(cond) 10854 b.Aux = nil 10855 return true 10856 } 10857 case BlockMIPS64LEZ: 10858 // match: (LEZ (MOVVconst [c]) yes no) 10859 // cond: c <= 0 10860 // result: (First nil yes no) 10861 for { 10862 v := b.Control 10863 if v.Op != OpMIPS64MOVVconst { 10864 break 10865 } 10866 c := v.AuxInt 10867 if !(c <= 0) { 10868 break 10869 } 10870 b.Kind = BlockFirst 10871 b.SetControl(nil) 10872 b.Aux = nil 10873 return true 10874 } 10875 // match: (LEZ (MOVVconst [c]) yes no) 10876 // cond: c > 0 10877 // result: (First nil no yes) 10878 for { 10879 v := b.Control 10880 if v.Op != OpMIPS64MOVVconst { 10881 break 10882 } 10883 c := v.AuxInt 10884 if !(c > 0) { 10885 break 10886 } 10887 b.Kind = BlockFirst 10888 b.SetControl(nil) 10889 b.Aux = nil 10890 b.swapSuccessors() 10891 return true 10892 } 10893 case BlockMIPS64LTZ: 10894 // match: (LTZ (MOVVconst [c]) yes no) 10895 // cond: c < 0 10896 // result: (First nil yes no) 10897 for { 10898 v := b.Control 10899 if v.Op != OpMIPS64MOVVconst { 10900 break 10901 } 10902 c := v.AuxInt 10903 if !(c < 0) { 10904 break 10905 } 10906 b.Kind = BlockFirst 10907 b.SetControl(nil) 10908 b.Aux = nil 10909 return true 10910 } 10911 // match: (LTZ (MOVVconst [c]) yes no) 10912 // cond: c >= 0 10913 // result: (First nil no yes) 10914 for { 10915 v := b.Control 10916 if v.Op != OpMIPS64MOVVconst { 10917 break 10918 } 10919 c := v.AuxInt 10920 if !(c >= 0) { 10921 break 10922 } 10923 b.Kind = BlockFirst 10924 b.SetControl(nil) 10925 b.Aux = nil 10926 b.swapSuccessors() 10927 return true 10928 } 10929 case BlockMIPS64NE: 10930 // match: (NE (FPFlagTrue cmp) yes no) 10931 // cond: 10932 // result: (FPT cmp yes no) 10933 for { 10934 v := b.Control 10935 if v.Op != OpMIPS64FPFlagTrue { 10936 break 10937 } 10938 cmp := v.Args[0] 10939 b.Kind = BlockMIPS64FPT 10940 b.SetControl(cmp) 10941 b.Aux = nil 10942 return true 10943 } 10944 // match: (NE (FPFlagFalse cmp) yes no) 10945 // cond: 10946 // result: (FPF cmp yes no) 10947 for { 10948 v := b.Control 10949 if v.Op != OpMIPS64FPFlagFalse { 10950 break 10951 } 10952 cmp := v.Args[0] 10953 b.Kind = BlockMIPS64FPF 10954 b.SetControl(cmp) 10955 b.Aux = nil 10956 return true 10957 } 10958 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no) 10959 // cond: 10960 // result: (EQ cmp yes no) 10961 for { 10962 v := b.Control 10963 if v.Op != OpMIPS64XORconst { 10964 break 10965 } 10966 if v.AuxInt != 1 { 10967 break 10968 } 10969 cmp := v.Args[0] 10970 if cmp.Op != OpMIPS64SGT { 10971 break 10972 } 10973 _ = cmp.Args[1] 10974 b.Kind = BlockMIPS64EQ 10975 b.SetControl(cmp) 10976 b.Aux = nil 10977 return true 10978 } 10979 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no) 10980 // cond: 10981 // result: (EQ cmp yes no) 10982 for { 10983 v := b.Control 10984 if v.Op != OpMIPS64XORconst { 10985 break 10986 } 10987 if v.AuxInt != 1 { 10988 break 10989 } 10990 cmp := v.Args[0] 10991 if cmp.Op != OpMIPS64SGTU { 10992 break 10993 } 10994 _ = cmp.Args[1] 10995 b.Kind = BlockMIPS64EQ 10996 b.SetControl(cmp) 10997 b.Aux = nil 10998 return true 10999 } 11000 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no) 11001 // cond: 11002 // result: (EQ cmp yes no) 11003 for { 11004 v := b.Control 11005 if v.Op != OpMIPS64XORconst { 11006 break 11007 } 11008 if v.AuxInt != 1 { 11009 break 11010 } 11011 cmp := v.Args[0] 11012 if cmp.Op != OpMIPS64SGTconst { 11013 break 11014 } 11015 b.Kind = BlockMIPS64EQ 11016 b.SetControl(cmp) 11017 b.Aux = nil 11018 return true 11019 } 11020 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no) 11021 // cond: 11022 // result: (EQ cmp yes no) 11023 for { 11024 v := b.Control 11025 if v.Op != OpMIPS64XORconst { 11026 break 11027 } 11028 if v.AuxInt != 1 { 11029 break 11030 } 11031 cmp := v.Args[0] 11032 if cmp.Op != OpMIPS64SGTUconst { 11033 break 11034 } 11035 b.Kind = BlockMIPS64EQ 11036 b.SetControl(cmp) 11037 b.Aux = nil 11038 return true 11039 } 11040 // match: (NE (SGTUconst [1] x) yes no) 11041 // cond: 11042 // result: (EQ x yes no) 11043 for { 11044 v := b.Control 11045 if v.Op != OpMIPS64SGTUconst { 11046 break 11047 } 11048 if v.AuxInt != 1 { 11049 break 11050 } 11051 x := v.Args[0] 11052 b.Kind = BlockMIPS64EQ 11053 b.SetControl(x) 11054 b.Aux = nil 11055 return true 11056 } 11057 // match: (NE (SGTU x (MOVVconst [0])) yes no) 11058 // cond: 11059 // result: (NE x yes no) 11060 for { 11061 v := b.Control 11062 if v.Op != OpMIPS64SGTU { 11063 break 11064 } 11065 _ = v.Args[1] 11066 x := v.Args[0] 11067 v_1 := v.Args[1] 11068 if v_1.Op != OpMIPS64MOVVconst { 11069 break 11070 } 11071 if v_1.AuxInt != 0 { 11072 break 11073 } 11074 b.Kind = BlockMIPS64NE 11075 b.SetControl(x) 11076 b.Aux = nil 11077 return true 11078 } 11079 // match: (NE (SGTconst [0] x) yes no) 11080 // cond: 11081 // result: (LTZ x yes no) 11082 for { 11083 v := b.Control 11084 if v.Op != OpMIPS64SGTconst { 11085 break 11086 } 11087 if v.AuxInt != 0 { 11088 break 11089 } 11090 x := v.Args[0] 11091 b.Kind = BlockMIPS64LTZ 11092 b.SetControl(x) 11093 b.Aux = nil 11094 return true 11095 } 11096 // match: (NE (SGT x (MOVVconst [0])) yes no) 11097 // cond: 11098 // result: (GTZ x yes no) 11099 for { 11100 v := b.Control 11101 if v.Op != OpMIPS64SGT { 11102 break 11103 } 11104 _ = v.Args[1] 11105 x := v.Args[0] 11106 v_1 := v.Args[1] 11107 if v_1.Op != OpMIPS64MOVVconst { 11108 break 11109 } 11110 if v_1.AuxInt != 0 { 11111 break 11112 } 11113 b.Kind = BlockMIPS64GTZ 11114 b.SetControl(x) 11115 b.Aux = nil 11116 return true 11117 } 11118 // match: (NE (MOVVconst [0]) yes no) 11119 // cond: 11120 // result: (First nil no yes) 11121 for { 11122 v := b.Control 11123 if v.Op != OpMIPS64MOVVconst { 11124 break 11125 } 11126 if v.AuxInt != 0 { 11127 break 11128 } 11129 b.Kind = BlockFirst 11130 b.SetControl(nil) 11131 b.Aux = nil 11132 b.swapSuccessors() 11133 return true 11134 } 11135 // match: (NE (MOVVconst [c]) yes no) 11136 // cond: c != 0 11137 // result: (First nil yes no) 11138 for { 11139 v := b.Control 11140 if v.Op != OpMIPS64MOVVconst { 11141 break 11142 } 11143 c := v.AuxInt 11144 if !(c != 0) { 11145 break 11146 } 11147 b.Kind = BlockFirst 11148 b.SetControl(nil) 11149 b.Aux = nil 11150 return true 11151 } 11152 } 11153 return false 11154 }