github.com/bir3/gocompiler@v0.3.205/src/cmd/compile/internal/ssa/rewriteLOONG64.go (about) 1 // Code generated from _gen/LOONG64.rules; DO NOT EDIT. 2 // generated with: cd _gen; go run . 3 4 package ssa 5 6 import "github.com/bir3/gocompiler/src/cmd/compile/internal/types" 7 8 func rewriteValueLOONG64(v *Value) bool { 9 switch v.Op { 10 case OpAdd16: 11 v.Op = OpLOONG64ADDV 12 return true 13 case OpAdd32: 14 v.Op = OpLOONG64ADDV 15 return true 16 case OpAdd32F: 17 v.Op = OpLOONG64ADDF 18 return true 19 case OpAdd64: 20 v.Op = OpLOONG64ADDV 21 return true 22 case OpAdd64F: 23 v.Op = OpLOONG64ADDD 24 return true 25 case OpAdd8: 26 v.Op = OpLOONG64ADDV 27 return true 28 case OpAddPtr: 29 v.Op = OpLOONG64ADDV 30 return true 31 case OpAddr: 32 return rewriteValueLOONG64_OpAddr(v) 33 case OpAnd16: 34 v.Op = OpLOONG64AND 35 return true 36 case OpAnd32: 37 v.Op = OpLOONG64AND 38 return true 39 case OpAnd64: 40 v.Op = OpLOONG64AND 41 return true 42 case OpAnd8: 43 v.Op = OpLOONG64AND 44 return true 45 case OpAndB: 46 v.Op = OpLOONG64AND 47 return true 48 case OpAtomicAdd32: 49 v.Op = OpLOONG64LoweredAtomicAdd32 50 return true 51 case OpAtomicAdd64: 52 v.Op = OpLOONG64LoweredAtomicAdd64 53 return true 54 case OpAtomicCompareAndSwap32: 55 return rewriteValueLOONG64_OpAtomicCompareAndSwap32(v) 56 case OpAtomicCompareAndSwap64: 57 v.Op = OpLOONG64LoweredAtomicCas64 58 return true 59 case OpAtomicExchange32: 60 v.Op = OpLOONG64LoweredAtomicExchange32 61 return true 62 case OpAtomicExchange64: 63 v.Op = OpLOONG64LoweredAtomicExchange64 64 return true 65 case OpAtomicLoad32: 66 v.Op = OpLOONG64LoweredAtomicLoad32 67 return true 68 case OpAtomicLoad64: 69 v.Op = OpLOONG64LoweredAtomicLoad64 70 return true 71 case OpAtomicLoad8: 72 v.Op = OpLOONG64LoweredAtomicLoad8 73 return true 74 case OpAtomicLoadPtr: 75 v.Op = OpLOONG64LoweredAtomicLoad64 76 return true 77 case OpAtomicStore32: 78 v.Op = OpLOONG64LoweredAtomicStore32 79 return true 80 case OpAtomicStore64: 81 v.Op = OpLOONG64LoweredAtomicStore64 82 return true 83 case OpAtomicStore8: 84 v.Op = OpLOONG64LoweredAtomicStore8 85 return true 86 case OpAtomicStorePtrNoWB: 87 v.Op = OpLOONG64LoweredAtomicStore64 88 return true 89 case OpAvg64u: 90 return rewriteValueLOONG64_OpAvg64u(v) 91 case OpClosureCall: 92 v.Op = OpLOONG64CALLclosure 93 return true 94 case OpCom16: 95 return rewriteValueLOONG64_OpCom16(v) 96 case OpCom32: 97 return rewriteValueLOONG64_OpCom32(v) 98 case OpCom64: 99 return rewriteValueLOONG64_OpCom64(v) 100 case OpCom8: 101 return rewriteValueLOONG64_OpCom8(v) 102 case OpCondSelect: 103 return rewriteValueLOONG64_OpCondSelect(v) 104 case OpConst16: 105 return rewriteValueLOONG64_OpConst16(v) 106 case OpConst32: 107 return rewriteValueLOONG64_OpConst32(v) 108 case OpConst32F: 109 return rewriteValueLOONG64_OpConst32F(v) 110 case OpConst64: 111 return rewriteValueLOONG64_OpConst64(v) 112 case OpConst64F: 113 return rewriteValueLOONG64_OpConst64F(v) 114 case OpConst8: 115 return rewriteValueLOONG64_OpConst8(v) 116 case OpConstBool: 117 return rewriteValueLOONG64_OpConstBool(v) 118 case OpConstNil: 119 return rewriteValueLOONG64_OpConstNil(v) 120 case OpCvt32Fto32: 121 v.Op = OpLOONG64TRUNCFW 122 return true 123 case OpCvt32Fto64: 124 v.Op = OpLOONG64TRUNCFV 125 return true 126 case OpCvt32Fto64F: 127 v.Op = OpLOONG64MOVFD 128 return true 129 case OpCvt32to32F: 130 v.Op = OpLOONG64MOVWF 131 return true 132 case OpCvt32to64F: 133 v.Op = OpLOONG64MOVWD 134 return true 135 case OpCvt64Fto32: 136 v.Op = OpLOONG64TRUNCDW 137 return true 138 case 
OpCvt64Fto32F: 139 v.Op = OpLOONG64MOVDF 140 return true 141 case OpCvt64Fto64: 142 v.Op = OpLOONG64TRUNCDV 143 return true 144 case OpCvt64to32F: 145 v.Op = OpLOONG64MOVVF 146 return true 147 case OpCvt64to64F: 148 v.Op = OpLOONG64MOVVD 149 return true 150 case OpCvtBoolToUint8: 151 v.Op = OpCopy 152 return true 153 case OpDiv16: 154 return rewriteValueLOONG64_OpDiv16(v) 155 case OpDiv16u: 156 return rewriteValueLOONG64_OpDiv16u(v) 157 case OpDiv32: 158 return rewriteValueLOONG64_OpDiv32(v) 159 case OpDiv32F: 160 v.Op = OpLOONG64DIVF 161 return true 162 case OpDiv32u: 163 return rewriteValueLOONG64_OpDiv32u(v) 164 case OpDiv64: 165 return rewriteValueLOONG64_OpDiv64(v) 166 case OpDiv64F: 167 v.Op = OpLOONG64DIVD 168 return true 169 case OpDiv64u: 170 return rewriteValueLOONG64_OpDiv64u(v) 171 case OpDiv8: 172 return rewriteValueLOONG64_OpDiv8(v) 173 case OpDiv8u: 174 return rewriteValueLOONG64_OpDiv8u(v) 175 case OpEq16: 176 return rewriteValueLOONG64_OpEq16(v) 177 case OpEq32: 178 return rewriteValueLOONG64_OpEq32(v) 179 case OpEq32F: 180 return rewriteValueLOONG64_OpEq32F(v) 181 case OpEq64: 182 return rewriteValueLOONG64_OpEq64(v) 183 case OpEq64F: 184 return rewriteValueLOONG64_OpEq64F(v) 185 case OpEq8: 186 return rewriteValueLOONG64_OpEq8(v) 187 case OpEqB: 188 return rewriteValueLOONG64_OpEqB(v) 189 case OpEqPtr: 190 return rewriteValueLOONG64_OpEqPtr(v) 191 case OpGetCallerPC: 192 v.Op = OpLOONG64LoweredGetCallerPC 193 return true 194 case OpGetCallerSP: 195 v.Op = OpLOONG64LoweredGetCallerSP 196 return true 197 case OpGetClosurePtr: 198 v.Op = OpLOONG64LoweredGetClosurePtr 199 return true 200 case OpHmul32: 201 return rewriteValueLOONG64_OpHmul32(v) 202 case OpHmul32u: 203 return rewriteValueLOONG64_OpHmul32u(v) 204 case OpHmul64: 205 return rewriteValueLOONG64_OpHmul64(v) 206 case OpHmul64u: 207 return rewriteValueLOONG64_OpHmul64u(v) 208 case OpInterCall: 209 v.Op = OpLOONG64CALLinter 210 return true 211 case OpIsInBounds: 212 return rewriteValueLOONG64_OpIsInBounds(v) 213 case OpIsNonNil: 214 return rewriteValueLOONG64_OpIsNonNil(v) 215 case OpIsSliceInBounds: 216 return rewriteValueLOONG64_OpIsSliceInBounds(v) 217 case OpLOONG64ADDV: 218 return rewriteValueLOONG64_OpLOONG64ADDV(v) 219 case OpLOONG64ADDVconst: 220 return rewriteValueLOONG64_OpLOONG64ADDVconst(v) 221 case OpLOONG64AND: 222 return rewriteValueLOONG64_OpLOONG64AND(v) 223 case OpLOONG64ANDconst: 224 return rewriteValueLOONG64_OpLOONG64ANDconst(v) 225 case OpLOONG64LoweredAtomicAdd32: 226 return rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd32(v) 227 case OpLOONG64LoweredAtomicAdd64: 228 return rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd64(v) 229 case OpLOONG64LoweredAtomicStore32: 230 return rewriteValueLOONG64_OpLOONG64LoweredAtomicStore32(v) 231 case OpLOONG64LoweredAtomicStore64: 232 return rewriteValueLOONG64_OpLOONG64LoweredAtomicStore64(v) 233 case OpLOONG64MASKEQZ: 234 return rewriteValueLOONG64_OpLOONG64MASKEQZ(v) 235 case OpLOONG64MASKNEZ: 236 return rewriteValueLOONG64_OpLOONG64MASKNEZ(v) 237 case OpLOONG64MOVBUload: 238 return rewriteValueLOONG64_OpLOONG64MOVBUload(v) 239 case OpLOONG64MOVBUreg: 240 return rewriteValueLOONG64_OpLOONG64MOVBUreg(v) 241 case OpLOONG64MOVBload: 242 return rewriteValueLOONG64_OpLOONG64MOVBload(v) 243 case OpLOONG64MOVBreg: 244 return rewriteValueLOONG64_OpLOONG64MOVBreg(v) 245 case OpLOONG64MOVBstore: 246 return rewriteValueLOONG64_OpLOONG64MOVBstore(v) 247 case OpLOONG64MOVBstorezero: 248 return rewriteValueLOONG64_OpLOONG64MOVBstorezero(v) 249 case OpLOONG64MOVDload: 250 
return rewriteValueLOONG64_OpLOONG64MOVDload(v) 251 case OpLOONG64MOVDstore: 252 return rewriteValueLOONG64_OpLOONG64MOVDstore(v) 253 case OpLOONG64MOVFload: 254 return rewriteValueLOONG64_OpLOONG64MOVFload(v) 255 case OpLOONG64MOVFstore: 256 return rewriteValueLOONG64_OpLOONG64MOVFstore(v) 257 case OpLOONG64MOVHUload: 258 return rewriteValueLOONG64_OpLOONG64MOVHUload(v) 259 case OpLOONG64MOVHUreg: 260 return rewriteValueLOONG64_OpLOONG64MOVHUreg(v) 261 case OpLOONG64MOVHload: 262 return rewriteValueLOONG64_OpLOONG64MOVHload(v) 263 case OpLOONG64MOVHreg: 264 return rewriteValueLOONG64_OpLOONG64MOVHreg(v) 265 case OpLOONG64MOVHstore: 266 return rewriteValueLOONG64_OpLOONG64MOVHstore(v) 267 case OpLOONG64MOVHstorezero: 268 return rewriteValueLOONG64_OpLOONG64MOVHstorezero(v) 269 case OpLOONG64MOVVload: 270 return rewriteValueLOONG64_OpLOONG64MOVVload(v) 271 case OpLOONG64MOVVreg: 272 return rewriteValueLOONG64_OpLOONG64MOVVreg(v) 273 case OpLOONG64MOVVstore: 274 return rewriteValueLOONG64_OpLOONG64MOVVstore(v) 275 case OpLOONG64MOVVstorezero: 276 return rewriteValueLOONG64_OpLOONG64MOVVstorezero(v) 277 case OpLOONG64MOVWUload: 278 return rewriteValueLOONG64_OpLOONG64MOVWUload(v) 279 case OpLOONG64MOVWUreg: 280 return rewriteValueLOONG64_OpLOONG64MOVWUreg(v) 281 case OpLOONG64MOVWload: 282 return rewriteValueLOONG64_OpLOONG64MOVWload(v) 283 case OpLOONG64MOVWreg: 284 return rewriteValueLOONG64_OpLOONG64MOVWreg(v) 285 case OpLOONG64MOVWstore: 286 return rewriteValueLOONG64_OpLOONG64MOVWstore(v) 287 case OpLOONG64MOVWstorezero: 288 return rewriteValueLOONG64_OpLOONG64MOVWstorezero(v) 289 case OpLOONG64NEGV: 290 return rewriteValueLOONG64_OpLOONG64NEGV(v) 291 case OpLOONG64NOR: 292 return rewriteValueLOONG64_OpLOONG64NOR(v) 293 case OpLOONG64NORconst: 294 return rewriteValueLOONG64_OpLOONG64NORconst(v) 295 case OpLOONG64OR: 296 return rewriteValueLOONG64_OpLOONG64OR(v) 297 case OpLOONG64ORconst: 298 return rewriteValueLOONG64_OpLOONG64ORconst(v) 299 case OpLOONG64ROTR: 300 return rewriteValueLOONG64_OpLOONG64ROTR(v) 301 case OpLOONG64ROTRV: 302 return rewriteValueLOONG64_OpLOONG64ROTRV(v) 303 case OpLOONG64SGT: 304 return rewriteValueLOONG64_OpLOONG64SGT(v) 305 case OpLOONG64SGTU: 306 return rewriteValueLOONG64_OpLOONG64SGTU(v) 307 case OpLOONG64SGTUconst: 308 return rewriteValueLOONG64_OpLOONG64SGTUconst(v) 309 case OpLOONG64SGTconst: 310 return rewriteValueLOONG64_OpLOONG64SGTconst(v) 311 case OpLOONG64SLLV: 312 return rewriteValueLOONG64_OpLOONG64SLLV(v) 313 case OpLOONG64SLLVconst: 314 return rewriteValueLOONG64_OpLOONG64SLLVconst(v) 315 case OpLOONG64SRAV: 316 return rewriteValueLOONG64_OpLOONG64SRAV(v) 317 case OpLOONG64SRAVconst: 318 return rewriteValueLOONG64_OpLOONG64SRAVconst(v) 319 case OpLOONG64SRLV: 320 return rewriteValueLOONG64_OpLOONG64SRLV(v) 321 case OpLOONG64SRLVconst: 322 return rewriteValueLOONG64_OpLOONG64SRLVconst(v) 323 case OpLOONG64SUBV: 324 return rewriteValueLOONG64_OpLOONG64SUBV(v) 325 case OpLOONG64SUBVconst: 326 return rewriteValueLOONG64_OpLOONG64SUBVconst(v) 327 case OpLOONG64XOR: 328 return rewriteValueLOONG64_OpLOONG64XOR(v) 329 case OpLOONG64XORconst: 330 return rewriteValueLOONG64_OpLOONG64XORconst(v) 331 case OpLeq16: 332 return rewriteValueLOONG64_OpLeq16(v) 333 case OpLeq16U: 334 return rewriteValueLOONG64_OpLeq16U(v) 335 case OpLeq32: 336 return rewriteValueLOONG64_OpLeq32(v) 337 case OpLeq32F: 338 return rewriteValueLOONG64_OpLeq32F(v) 339 case OpLeq32U: 340 return rewriteValueLOONG64_OpLeq32U(v) 341 case OpLeq64: 342 return 
rewriteValueLOONG64_OpLeq64(v) 343 case OpLeq64F: 344 return rewriteValueLOONG64_OpLeq64F(v) 345 case OpLeq64U: 346 return rewriteValueLOONG64_OpLeq64U(v) 347 case OpLeq8: 348 return rewriteValueLOONG64_OpLeq8(v) 349 case OpLeq8U: 350 return rewriteValueLOONG64_OpLeq8U(v) 351 case OpLess16: 352 return rewriteValueLOONG64_OpLess16(v) 353 case OpLess16U: 354 return rewriteValueLOONG64_OpLess16U(v) 355 case OpLess32: 356 return rewriteValueLOONG64_OpLess32(v) 357 case OpLess32F: 358 return rewriteValueLOONG64_OpLess32F(v) 359 case OpLess32U: 360 return rewriteValueLOONG64_OpLess32U(v) 361 case OpLess64: 362 return rewriteValueLOONG64_OpLess64(v) 363 case OpLess64F: 364 return rewriteValueLOONG64_OpLess64F(v) 365 case OpLess64U: 366 return rewriteValueLOONG64_OpLess64U(v) 367 case OpLess8: 368 return rewriteValueLOONG64_OpLess8(v) 369 case OpLess8U: 370 return rewriteValueLOONG64_OpLess8U(v) 371 case OpLoad: 372 return rewriteValueLOONG64_OpLoad(v) 373 case OpLocalAddr: 374 return rewriteValueLOONG64_OpLocalAddr(v) 375 case OpLsh16x16: 376 return rewriteValueLOONG64_OpLsh16x16(v) 377 case OpLsh16x32: 378 return rewriteValueLOONG64_OpLsh16x32(v) 379 case OpLsh16x64: 380 return rewriteValueLOONG64_OpLsh16x64(v) 381 case OpLsh16x8: 382 return rewriteValueLOONG64_OpLsh16x8(v) 383 case OpLsh32x16: 384 return rewriteValueLOONG64_OpLsh32x16(v) 385 case OpLsh32x32: 386 return rewriteValueLOONG64_OpLsh32x32(v) 387 case OpLsh32x64: 388 return rewriteValueLOONG64_OpLsh32x64(v) 389 case OpLsh32x8: 390 return rewriteValueLOONG64_OpLsh32x8(v) 391 case OpLsh64x16: 392 return rewriteValueLOONG64_OpLsh64x16(v) 393 case OpLsh64x32: 394 return rewriteValueLOONG64_OpLsh64x32(v) 395 case OpLsh64x64: 396 return rewriteValueLOONG64_OpLsh64x64(v) 397 case OpLsh64x8: 398 return rewriteValueLOONG64_OpLsh64x8(v) 399 case OpLsh8x16: 400 return rewriteValueLOONG64_OpLsh8x16(v) 401 case OpLsh8x32: 402 return rewriteValueLOONG64_OpLsh8x32(v) 403 case OpLsh8x64: 404 return rewriteValueLOONG64_OpLsh8x64(v) 405 case OpLsh8x8: 406 return rewriteValueLOONG64_OpLsh8x8(v) 407 case OpMod16: 408 return rewriteValueLOONG64_OpMod16(v) 409 case OpMod16u: 410 return rewriteValueLOONG64_OpMod16u(v) 411 case OpMod32: 412 return rewriteValueLOONG64_OpMod32(v) 413 case OpMod32u: 414 return rewriteValueLOONG64_OpMod32u(v) 415 case OpMod64: 416 return rewriteValueLOONG64_OpMod64(v) 417 case OpMod64u: 418 return rewriteValueLOONG64_OpMod64u(v) 419 case OpMod8: 420 return rewriteValueLOONG64_OpMod8(v) 421 case OpMod8u: 422 return rewriteValueLOONG64_OpMod8u(v) 423 case OpMove: 424 return rewriteValueLOONG64_OpMove(v) 425 case OpMul16: 426 return rewriteValueLOONG64_OpMul16(v) 427 case OpMul32: 428 return rewriteValueLOONG64_OpMul32(v) 429 case OpMul32F: 430 v.Op = OpLOONG64MULF 431 return true 432 case OpMul64: 433 return rewriteValueLOONG64_OpMul64(v) 434 case OpMul64F: 435 v.Op = OpLOONG64MULD 436 return true 437 case OpMul64uhilo: 438 v.Op = OpLOONG64MULVU 439 return true 440 case OpMul8: 441 return rewriteValueLOONG64_OpMul8(v) 442 case OpNeg16: 443 v.Op = OpLOONG64NEGV 444 return true 445 case OpNeg32: 446 v.Op = OpLOONG64NEGV 447 return true 448 case OpNeg32F: 449 v.Op = OpLOONG64NEGF 450 return true 451 case OpNeg64: 452 v.Op = OpLOONG64NEGV 453 return true 454 case OpNeg64F: 455 v.Op = OpLOONG64NEGD 456 return true 457 case OpNeg8: 458 v.Op = OpLOONG64NEGV 459 return true 460 case OpNeq16: 461 return rewriteValueLOONG64_OpNeq16(v) 462 case OpNeq32: 463 return rewriteValueLOONG64_OpNeq32(v) 464 case OpNeq32F: 465 return 
rewriteValueLOONG64_OpNeq32F(v) 466 case OpNeq64: 467 return rewriteValueLOONG64_OpNeq64(v) 468 case OpNeq64F: 469 return rewriteValueLOONG64_OpNeq64F(v) 470 case OpNeq8: 471 return rewriteValueLOONG64_OpNeq8(v) 472 case OpNeqB: 473 v.Op = OpLOONG64XOR 474 return true 475 case OpNeqPtr: 476 return rewriteValueLOONG64_OpNeqPtr(v) 477 case OpNilCheck: 478 v.Op = OpLOONG64LoweredNilCheck 479 return true 480 case OpNot: 481 return rewriteValueLOONG64_OpNot(v) 482 case OpOffPtr: 483 return rewriteValueLOONG64_OpOffPtr(v) 484 case OpOr16: 485 v.Op = OpLOONG64OR 486 return true 487 case OpOr32: 488 v.Op = OpLOONG64OR 489 return true 490 case OpOr64: 491 v.Op = OpLOONG64OR 492 return true 493 case OpOr8: 494 v.Op = OpLOONG64OR 495 return true 496 case OpOrB: 497 v.Op = OpLOONG64OR 498 return true 499 case OpPanicBounds: 500 return rewriteValueLOONG64_OpPanicBounds(v) 501 case OpRotateLeft16: 502 return rewriteValueLOONG64_OpRotateLeft16(v) 503 case OpRotateLeft32: 504 return rewriteValueLOONG64_OpRotateLeft32(v) 505 case OpRotateLeft64: 506 return rewriteValueLOONG64_OpRotateLeft64(v) 507 case OpRotateLeft8: 508 return rewriteValueLOONG64_OpRotateLeft8(v) 509 case OpRound32F: 510 v.Op = OpCopy 511 return true 512 case OpRound64F: 513 v.Op = OpCopy 514 return true 515 case OpRsh16Ux16: 516 return rewriteValueLOONG64_OpRsh16Ux16(v) 517 case OpRsh16Ux32: 518 return rewriteValueLOONG64_OpRsh16Ux32(v) 519 case OpRsh16Ux64: 520 return rewriteValueLOONG64_OpRsh16Ux64(v) 521 case OpRsh16Ux8: 522 return rewriteValueLOONG64_OpRsh16Ux8(v) 523 case OpRsh16x16: 524 return rewriteValueLOONG64_OpRsh16x16(v) 525 case OpRsh16x32: 526 return rewriteValueLOONG64_OpRsh16x32(v) 527 case OpRsh16x64: 528 return rewriteValueLOONG64_OpRsh16x64(v) 529 case OpRsh16x8: 530 return rewriteValueLOONG64_OpRsh16x8(v) 531 case OpRsh32Ux16: 532 return rewriteValueLOONG64_OpRsh32Ux16(v) 533 case OpRsh32Ux32: 534 return rewriteValueLOONG64_OpRsh32Ux32(v) 535 case OpRsh32Ux64: 536 return rewriteValueLOONG64_OpRsh32Ux64(v) 537 case OpRsh32Ux8: 538 return rewriteValueLOONG64_OpRsh32Ux8(v) 539 case OpRsh32x16: 540 return rewriteValueLOONG64_OpRsh32x16(v) 541 case OpRsh32x32: 542 return rewriteValueLOONG64_OpRsh32x32(v) 543 case OpRsh32x64: 544 return rewriteValueLOONG64_OpRsh32x64(v) 545 case OpRsh32x8: 546 return rewriteValueLOONG64_OpRsh32x8(v) 547 case OpRsh64Ux16: 548 return rewriteValueLOONG64_OpRsh64Ux16(v) 549 case OpRsh64Ux32: 550 return rewriteValueLOONG64_OpRsh64Ux32(v) 551 case OpRsh64Ux64: 552 return rewriteValueLOONG64_OpRsh64Ux64(v) 553 case OpRsh64Ux8: 554 return rewriteValueLOONG64_OpRsh64Ux8(v) 555 case OpRsh64x16: 556 return rewriteValueLOONG64_OpRsh64x16(v) 557 case OpRsh64x32: 558 return rewriteValueLOONG64_OpRsh64x32(v) 559 case OpRsh64x64: 560 return rewriteValueLOONG64_OpRsh64x64(v) 561 case OpRsh64x8: 562 return rewriteValueLOONG64_OpRsh64x8(v) 563 case OpRsh8Ux16: 564 return rewriteValueLOONG64_OpRsh8Ux16(v) 565 case OpRsh8Ux32: 566 return rewriteValueLOONG64_OpRsh8Ux32(v) 567 case OpRsh8Ux64: 568 return rewriteValueLOONG64_OpRsh8Ux64(v) 569 case OpRsh8Ux8: 570 return rewriteValueLOONG64_OpRsh8Ux8(v) 571 case OpRsh8x16: 572 return rewriteValueLOONG64_OpRsh8x16(v) 573 case OpRsh8x32: 574 return rewriteValueLOONG64_OpRsh8x32(v) 575 case OpRsh8x64: 576 return rewriteValueLOONG64_OpRsh8x64(v) 577 case OpRsh8x8: 578 return rewriteValueLOONG64_OpRsh8x8(v) 579 case OpSelect0: 580 return rewriteValueLOONG64_OpSelect0(v) 581 case OpSelect1: 582 return rewriteValueLOONG64_OpSelect1(v) 583 case OpSignExt16to32: 584 v.Op = 
OpLOONG64MOVHreg 585 return true 586 case OpSignExt16to64: 587 v.Op = OpLOONG64MOVHreg 588 return true 589 case OpSignExt32to64: 590 v.Op = OpLOONG64MOVWreg 591 return true 592 case OpSignExt8to16: 593 v.Op = OpLOONG64MOVBreg 594 return true 595 case OpSignExt8to32: 596 v.Op = OpLOONG64MOVBreg 597 return true 598 case OpSignExt8to64: 599 v.Op = OpLOONG64MOVBreg 600 return true 601 case OpSlicemask: 602 return rewriteValueLOONG64_OpSlicemask(v) 603 case OpSqrt: 604 v.Op = OpLOONG64SQRTD 605 return true 606 case OpSqrt32: 607 v.Op = OpLOONG64SQRTF 608 return true 609 case OpStaticCall: 610 v.Op = OpLOONG64CALLstatic 611 return true 612 case OpStore: 613 return rewriteValueLOONG64_OpStore(v) 614 case OpSub16: 615 v.Op = OpLOONG64SUBV 616 return true 617 case OpSub32: 618 v.Op = OpLOONG64SUBV 619 return true 620 case OpSub32F: 621 v.Op = OpLOONG64SUBF 622 return true 623 case OpSub64: 624 v.Op = OpLOONG64SUBV 625 return true 626 case OpSub64F: 627 v.Op = OpLOONG64SUBD 628 return true 629 case OpSub8: 630 v.Op = OpLOONG64SUBV 631 return true 632 case OpSubPtr: 633 v.Op = OpLOONG64SUBV 634 return true 635 case OpTailCall: 636 v.Op = OpLOONG64CALLtail 637 return true 638 case OpTrunc16to8: 639 v.Op = OpCopy 640 return true 641 case OpTrunc32to16: 642 v.Op = OpCopy 643 return true 644 case OpTrunc32to8: 645 v.Op = OpCopy 646 return true 647 case OpTrunc64to16: 648 v.Op = OpCopy 649 return true 650 case OpTrunc64to32: 651 v.Op = OpCopy 652 return true 653 case OpTrunc64to8: 654 v.Op = OpCopy 655 return true 656 case OpWB: 657 v.Op = OpLOONG64LoweredWB 658 return true 659 case OpXor16: 660 v.Op = OpLOONG64XOR 661 return true 662 case OpXor32: 663 v.Op = OpLOONG64XOR 664 return true 665 case OpXor64: 666 v.Op = OpLOONG64XOR 667 return true 668 case OpXor8: 669 v.Op = OpLOONG64XOR 670 return true 671 case OpZero: 672 return rewriteValueLOONG64_OpZero(v) 673 case OpZeroExt16to32: 674 v.Op = OpLOONG64MOVHUreg 675 return true 676 case OpZeroExt16to64: 677 v.Op = OpLOONG64MOVHUreg 678 return true 679 case OpZeroExt32to64: 680 v.Op = OpLOONG64MOVWUreg 681 return true 682 case OpZeroExt8to16: 683 v.Op = OpLOONG64MOVBUreg 684 return true 685 case OpZeroExt8to32: 686 v.Op = OpLOONG64MOVBUreg 687 return true 688 case OpZeroExt8to64: 689 v.Op = OpLOONG64MOVBUreg 690 return true 691 } 692 return false 693 } 694 func rewriteValueLOONG64_OpAddr(v *Value) bool { 695 v_0 := v.Args[0] 696 // match: (Addr {sym} base) 697 // result: (MOVVaddr {sym} base) 698 for { 699 sym := auxToSym(v.Aux) 700 base := v_0 701 v.reset(OpLOONG64MOVVaddr) 702 v.Aux = symToAux(sym) 703 v.AddArg(base) 704 return true 705 } 706 } 707 func rewriteValueLOONG64_OpAtomicCompareAndSwap32(v *Value) bool { 708 v_3 := v.Args[3] 709 v_2 := v.Args[2] 710 v_1 := v.Args[1] 711 v_0 := v.Args[0] 712 b := v.Block 713 typ := &b.Func.Config.Types 714 // match: (AtomicCompareAndSwap32 ptr old new mem) 715 // result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem) 716 for { 717 ptr := v_0 718 old := v_1 719 new := v_2 720 mem := v_3 721 v.reset(OpLOONG64LoweredAtomicCas32) 722 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 723 v0.AddArg(old) 724 v.AddArg4(ptr, v0, new, mem) 725 return true 726 } 727 } 728 func rewriteValueLOONG64_OpAvg64u(v *Value) bool { 729 v_1 := v.Args[1] 730 v_0 := v.Args[0] 731 b := v.Block 732 // match: (Avg64u <t> x y) 733 // result: (ADDV (SRLVconst <t> (SUBV <t> x y) [1]) y) 734 for { 735 t := v.Type 736 x := v_0 737 y := v_1 738 v.reset(OpLOONG64ADDV) 739 v0 := b.NewValue0(v.Pos, OpLOONG64SRLVconst, t) 740 v0.AuxInt = 
int64ToAuxInt(1) 741 v1 := b.NewValue0(v.Pos, OpLOONG64SUBV, t) 742 v1.AddArg2(x, y) 743 v0.AddArg(v1) 744 v.AddArg2(v0, y) 745 return true 746 } 747 } 748 func rewriteValueLOONG64_OpCom16(v *Value) bool { 749 v_0 := v.Args[0] 750 b := v.Block 751 typ := &b.Func.Config.Types 752 // match: (Com16 x) 753 // result: (NOR (MOVVconst [0]) x) 754 for { 755 x := v_0 756 v.reset(OpLOONG64NOR) 757 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 758 v0.AuxInt = int64ToAuxInt(0) 759 v.AddArg2(v0, x) 760 return true 761 } 762 } 763 func rewriteValueLOONG64_OpCom32(v *Value) bool { 764 v_0 := v.Args[0] 765 b := v.Block 766 typ := &b.Func.Config.Types 767 // match: (Com32 x) 768 // result: (NOR (MOVVconst [0]) x) 769 for { 770 x := v_0 771 v.reset(OpLOONG64NOR) 772 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 773 v0.AuxInt = int64ToAuxInt(0) 774 v.AddArg2(v0, x) 775 return true 776 } 777 } 778 func rewriteValueLOONG64_OpCom64(v *Value) bool { 779 v_0 := v.Args[0] 780 b := v.Block 781 typ := &b.Func.Config.Types 782 // match: (Com64 x) 783 // result: (NOR (MOVVconst [0]) x) 784 for { 785 x := v_0 786 v.reset(OpLOONG64NOR) 787 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 788 v0.AuxInt = int64ToAuxInt(0) 789 v.AddArg2(v0, x) 790 return true 791 } 792 } 793 func rewriteValueLOONG64_OpCom8(v *Value) bool { 794 v_0 := v.Args[0] 795 b := v.Block 796 typ := &b.Func.Config.Types 797 // match: (Com8 x) 798 // result: (NOR (MOVVconst [0]) x) 799 for { 800 x := v_0 801 v.reset(OpLOONG64NOR) 802 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 803 v0.AuxInt = int64ToAuxInt(0) 804 v.AddArg2(v0, x) 805 return true 806 } 807 } 808 func rewriteValueLOONG64_OpCondSelect(v *Value) bool { 809 v_2 := v.Args[2] 810 v_1 := v.Args[1] 811 v_0 := v.Args[0] 812 b := v.Block 813 // match: (CondSelect <t> x y cond) 814 // result: (OR (MASKEQZ <t> x cond) (MASKNEZ <t> y cond)) 815 for { 816 t := v.Type 817 x := v_0 818 y := v_1 819 cond := v_2 820 v.reset(OpLOONG64OR) 821 v0 := b.NewValue0(v.Pos, OpLOONG64MASKEQZ, t) 822 v0.AddArg2(x, cond) 823 v1 := b.NewValue0(v.Pos, OpLOONG64MASKNEZ, t) 824 v1.AddArg2(y, cond) 825 v.AddArg2(v0, v1) 826 return true 827 } 828 } 829 func rewriteValueLOONG64_OpConst16(v *Value) bool { 830 // match: (Const16 [val]) 831 // result: (MOVVconst [int64(val)]) 832 for { 833 val := auxIntToInt16(v.AuxInt) 834 v.reset(OpLOONG64MOVVconst) 835 v.AuxInt = int64ToAuxInt(int64(val)) 836 return true 837 } 838 } 839 func rewriteValueLOONG64_OpConst32(v *Value) bool { 840 // match: (Const32 [val]) 841 // result: (MOVVconst [int64(val)]) 842 for { 843 val := auxIntToInt32(v.AuxInt) 844 v.reset(OpLOONG64MOVVconst) 845 v.AuxInt = int64ToAuxInt(int64(val)) 846 return true 847 } 848 } 849 func rewriteValueLOONG64_OpConst32F(v *Value) bool { 850 // match: (Const32F [val]) 851 // result: (MOVFconst [float64(val)]) 852 for { 853 val := auxIntToFloat32(v.AuxInt) 854 v.reset(OpLOONG64MOVFconst) 855 v.AuxInt = float64ToAuxInt(float64(val)) 856 return true 857 } 858 } 859 func rewriteValueLOONG64_OpConst64(v *Value) bool { 860 // match: (Const64 [val]) 861 // result: (MOVVconst [int64(val)]) 862 for { 863 val := auxIntToInt64(v.AuxInt) 864 v.reset(OpLOONG64MOVVconst) 865 v.AuxInt = int64ToAuxInt(int64(val)) 866 return true 867 } 868 } 869 func rewriteValueLOONG64_OpConst64F(v *Value) bool { 870 // match: (Const64F [val]) 871 // result: (MOVDconst [float64(val)]) 872 for { 873 val := auxIntToFloat64(v.AuxInt) 874 v.reset(OpLOONG64MOVDconst) 875 v.AuxInt = float64ToAuxInt(float64(val)) 876 
return true 877 } 878 } 879 func rewriteValueLOONG64_OpConst8(v *Value) bool { 880 // match: (Const8 [val]) 881 // result: (MOVVconst [int64(val)]) 882 for { 883 val := auxIntToInt8(v.AuxInt) 884 v.reset(OpLOONG64MOVVconst) 885 v.AuxInt = int64ToAuxInt(int64(val)) 886 return true 887 } 888 } 889 func rewriteValueLOONG64_OpConstBool(v *Value) bool { 890 // match: (ConstBool [t]) 891 // result: (MOVVconst [int64(b2i(t))]) 892 for { 893 t := auxIntToBool(v.AuxInt) 894 v.reset(OpLOONG64MOVVconst) 895 v.AuxInt = int64ToAuxInt(int64(b2i(t))) 896 return true 897 } 898 } 899 func rewriteValueLOONG64_OpConstNil(v *Value) bool { 900 // match: (ConstNil) 901 // result: (MOVVconst [0]) 902 for { 903 v.reset(OpLOONG64MOVVconst) 904 v.AuxInt = int64ToAuxInt(0) 905 return true 906 } 907 } 908 func rewriteValueLOONG64_OpDiv16(v *Value) bool { 909 v_1 := v.Args[1] 910 v_0 := v.Args[0] 911 b := v.Block 912 typ := &b.Func.Config.Types 913 // match: (Div16 x y) 914 // result: (Select1 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 915 for { 916 x := v_0 917 y := v_1 918 v.reset(OpSelect1) 919 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 920 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 921 v1.AddArg(x) 922 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 923 v2.AddArg(y) 924 v0.AddArg2(v1, v2) 925 v.AddArg(v0) 926 return true 927 } 928 } 929 func rewriteValueLOONG64_OpDiv16u(v *Value) bool { 930 v_1 := v.Args[1] 931 v_0 := v.Args[0] 932 b := v.Block 933 typ := &b.Func.Config.Types 934 // match: (Div16u x y) 935 // result: (Select1 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 936 for { 937 x := v_0 938 y := v_1 939 v.reset(OpSelect1) 940 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 941 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 942 v1.AddArg(x) 943 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 944 v2.AddArg(y) 945 v0.AddArg2(v1, v2) 946 v.AddArg(v0) 947 return true 948 } 949 } 950 func rewriteValueLOONG64_OpDiv32(v *Value) bool { 951 v_1 := v.Args[1] 952 v_0 := v.Args[0] 953 b := v.Block 954 typ := &b.Func.Config.Types 955 // match: (Div32 x y) 956 // result: (Select1 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 957 for { 958 x := v_0 959 y := v_1 960 v.reset(OpSelect1) 961 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 962 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 963 v1.AddArg(x) 964 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 965 v2.AddArg(y) 966 v0.AddArg2(v1, v2) 967 v.AddArg(v0) 968 return true 969 } 970 } 971 func rewriteValueLOONG64_OpDiv32u(v *Value) bool { 972 v_1 := v.Args[1] 973 v_0 := v.Args[0] 974 b := v.Block 975 typ := &b.Func.Config.Types 976 // match: (Div32u x y) 977 // result: (Select1 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 978 for { 979 x := v_0 980 y := v_1 981 v.reset(OpSelect1) 982 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 983 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 984 v1.AddArg(x) 985 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 986 v2.AddArg(y) 987 v0.AddArg2(v1, v2) 988 v.AddArg(v0) 989 return true 990 } 991 } 992 func rewriteValueLOONG64_OpDiv64(v *Value) bool { 993 v_1 := v.Args[1] 994 v_0 := v.Args[0] 995 b := v.Block 996 typ := &b.Func.Config.Types 997 // match: (Div64 x y) 998 // result: (Select1 (DIVV x y)) 999 for { 1000 x := v_0 1001 y := v_1 1002 v.reset(OpSelect1) 1003 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1004 
v0.AddArg2(x, y) 1005 v.AddArg(v0) 1006 return true 1007 } 1008 } 1009 func rewriteValueLOONG64_OpDiv64u(v *Value) bool { 1010 v_1 := v.Args[1] 1011 v_0 := v.Args[0] 1012 b := v.Block 1013 typ := &b.Func.Config.Types 1014 // match: (Div64u x y) 1015 // result: (Select1 (DIVVU x y)) 1016 for { 1017 x := v_0 1018 y := v_1 1019 v.reset(OpSelect1) 1020 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1021 v0.AddArg2(x, y) 1022 v.AddArg(v0) 1023 return true 1024 } 1025 } 1026 func rewriteValueLOONG64_OpDiv8(v *Value) bool { 1027 v_1 := v.Args[1] 1028 v_0 := v.Args[0] 1029 b := v.Block 1030 typ := &b.Func.Config.Types 1031 // match: (Div8 x y) 1032 // result: (Select1 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 1033 for { 1034 x := v_0 1035 y := v_1 1036 v.reset(OpSelect1) 1037 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 1038 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1039 v1.AddArg(x) 1040 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 1041 v2.AddArg(y) 1042 v0.AddArg2(v1, v2) 1043 v.AddArg(v0) 1044 return true 1045 } 1046 } 1047 func rewriteValueLOONG64_OpDiv8u(v *Value) bool { 1048 v_1 := v.Args[1] 1049 v_0 := v.Args[0] 1050 b := v.Block 1051 typ := &b.Func.Config.Types 1052 // match: (Div8u x y) 1053 // result: (Select1 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 1054 for { 1055 x := v_0 1056 y := v_1 1057 v.reset(OpSelect1) 1058 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1059 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1060 v1.AddArg(x) 1061 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1062 v2.AddArg(y) 1063 v0.AddArg2(v1, v2) 1064 v.AddArg(v0) 1065 return true 1066 } 1067 } 1068 func rewriteValueLOONG64_OpEq16(v *Value) bool { 1069 v_1 := v.Args[1] 1070 v_0 := v.Args[0] 1071 b := v.Block 1072 typ := &b.Func.Config.Types 1073 // match: (Eq16 x y) 1074 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt16to64 x) (ZeroExt16to64 y))) 1075 for { 1076 x := v_0 1077 y := v_1 1078 v.reset(OpLOONG64SGTU) 1079 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1080 v0.AuxInt = int64ToAuxInt(1) 1081 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 1082 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1083 v2.AddArg(x) 1084 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 1085 v3.AddArg(y) 1086 v1.AddArg2(v2, v3) 1087 v.AddArg2(v0, v1) 1088 return true 1089 } 1090 } 1091 func rewriteValueLOONG64_OpEq32(v *Value) bool { 1092 v_1 := v.Args[1] 1093 v_0 := v.Args[0] 1094 b := v.Block 1095 typ := &b.Func.Config.Types 1096 // match: (Eq32 x y) 1097 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt32to64 x) (ZeroExt32to64 y))) 1098 for { 1099 x := v_0 1100 y := v_1 1101 v.reset(OpLOONG64SGTU) 1102 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1103 v0.AuxInt = int64ToAuxInt(1) 1104 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 1105 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1106 v2.AddArg(x) 1107 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1108 v3.AddArg(y) 1109 v1.AddArg2(v2, v3) 1110 v.AddArg2(v0, v1) 1111 return true 1112 } 1113 } 1114 func rewriteValueLOONG64_OpEq32F(v *Value) bool { 1115 v_1 := v.Args[1] 1116 v_0 := v.Args[0] 1117 b := v.Block 1118 // match: (Eq32F x y) 1119 // result: (FPFlagTrue (CMPEQF x y)) 1120 for { 1121 x := v_0 1122 y := v_1 1123 v.reset(OpLOONG64FPFlagTrue) 1124 v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags) 1125 v0.AddArg2(x, y) 1126 v.AddArg(v0) 1127 return true 1128 } 1129 } 1130 func 
rewriteValueLOONG64_OpEq64(v *Value) bool { 1131 v_1 := v.Args[1] 1132 v_0 := v.Args[0] 1133 b := v.Block 1134 typ := &b.Func.Config.Types 1135 // match: (Eq64 x y) 1136 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1137 for { 1138 x := v_0 1139 y := v_1 1140 v.reset(OpLOONG64SGTU) 1141 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1142 v0.AuxInt = int64ToAuxInt(1) 1143 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 1144 v1.AddArg2(x, y) 1145 v.AddArg2(v0, v1) 1146 return true 1147 } 1148 } 1149 func rewriteValueLOONG64_OpEq64F(v *Value) bool { 1150 v_1 := v.Args[1] 1151 v_0 := v.Args[0] 1152 b := v.Block 1153 // match: (Eq64F x y) 1154 // result: (FPFlagTrue (CMPEQD x y)) 1155 for { 1156 x := v_0 1157 y := v_1 1158 v.reset(OpLOONG64FPFlagTrue) 1159 v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags) 1160 v0.AddArg2(x, y) 1161 v.AddArg(v0) 1162 return true 1163 } 1164 } 1165 func rewriteValueLOONG64_OpEq8(v *Value) bool { 1166 v_1 := v.Args[1] 1167 v_0 := v.Args[0] 1168 b := v.Block 1169 typ := &b.Func.Config.Types 1170 // match: (Eq8 x y) 1171 // result: (SGTU (MOVVconst [1]) (XOR (ZeroExt8to64 x) (ZeroExt8to64 y))) 1172 for { 1173 x := v_0 1174 y := v_1 1175 v.reset(OpLOONG64SGTU) 1176 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1177 v0.AuxInt = int64ToAuxInt(1) 1178 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 1179 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1180 v2.AddArg(x) 1181 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 1182 v3.AddArg(y) 1183 v1.AddArg2(v2, v3) 1184 v.AddArg2(v0, v1) 1185 return true 1186 } 1187 } 1188 func rewriteValueLOONG64_OpEqB(v *Value) bool { 1189 v_1 := v.Args[1] 1190 v_0 := v.Args[0] 1191 b := v.Block 1192 typ := &b.Func.Config.Types 1193 // match: (EqB x y) 1194 // result: (XOR (MOVVconst [1]) (XOR <typ.Bool> x y)) 1195 for { 1196 x := v_0 1197 y := v_1 1198 v.reset(OpLOONG64XOR) 1199 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1200 v0.AuxInt = int64ToAuxInt(1) 1201 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.Bool) 1202 v1.AddArg2(x, y) 1203 v.AddArg2(v0, v1) 1204 return true 1205 } 1206 } 1207 func rewriteValueLOONG64_OpEqPtr(v *Value) bool { 1208 v_1 := v.Args[1] 1209 v_0 := v.Args[0] 1210 b := v.Block 1211 typ := &b.Func.Config.Types 1212 // match: (EqPtr x y) 1213 // result: (SGTU (MOVVconst [1]) (XOR x y)) 1214 for { 1215 x := v_0 1216 y := v_1 1217 v.reset(OpLOONG64SGTU) 1218 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1219 v0.AuxInt = int64ToAuxInt(1) 1220 v1 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 1221 v1.AddArg2(x, y) 1222 v.AddArg2(v0, v1) 1223 return true 1224 } 1225 } 1226 func rewriteValueLOONG64_OpHmul32(v *Value) bool { 1227 v_1 := v.Args[1] 1228 v_0 := v.Args[0] 1229 b := v.Block 1230 typ := &b.Func.Config.Types 1231 // match: (Hmul32 x y) 1232 // result: (SRAVconst (Select1 <typ.Int64> (MULV (SignExt32to64 x) (SignExt32to64 y))) [32]) 1233 for { 1234 x := v_0 1235 y := v_1 1236 v.reset(OpLOONG64SRAVconst) 1237 v.AuxInt = int64ToAuxInt(32) 1238 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64) 1239 v1 := b.NewValue0(v.Pos, OpLOONG64MULV, types.NewTuple(typ.Int64, typ.Int64)) 1240 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1241 v2.AddArg(x) 1242 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 1243 v3.AddArg(y) 1244 v1.AddArg2(v2, v3) 1245 v0.AddArg(v1) 1246 v.AddArg(v0) 1247 return true 1248 } 1249 } 1250 func rewriteValueLOONG64_OpHmul32u(v *Value) bool { 1251 v_1 := v.Args[1] 1252 v_0 := v.Args[0] 1253 b := v.Block 1254 typ := 
&b.Func.Config.Types 1255 // match: (Hmul32u x y) 1256 // result: (SRLVconst (Select1 <typ.UInt64> (MULVU (ZeroExt32to64 x) (ZeroExt32to64 y))) [32]) 1257 for { 1258 x := v_0 1259 y := v_1 1260 v.reset(OpLOONG64SRLVconst) 1261 v.AuxInt = int64ToAuxInt(32) 1262 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64) 1263 v1 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1264 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1265 v2.AddArg(x) 1266 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 1267 v3.AddArg(y) 1268 v1.AddArg2(v2, v3) 1269 v0.AddArg(v1) 1270 v.AddArg(v0) 1271 return true 1272 } 1273 } 1274 func rewriteValueLOONG64_OpHmul64(v *Value) bool { 1275 v_1 := v.Args[1] 1276 v_0 := v.Args[0] 1277 b := v.Block 1278 typ := &b.Func.Config.Types 1279 // match: (Hmul64 x y) 1280 // result: (Select0 (MULV x y)) 1281 for { 1282 x := v_0 1283 y := v_1 1284 v.reset(OpSelect0) 1285 v0 := b.NewValue0(v.Pos, OpLOONG64MULV, types.NewTuple(typ.Int64, typ.Int64)) 1286 v0.AddArg2(x, y) 1287 v.AddArg(v0) 1288 return true 1289 } 1290 } 1291 func rewriteValueLOONG64_OpHmul64u(v *Value) bool { 1292 v_1 := v.Args[1] 1293 v_0 := v.Args[0] 1294 b := v.Block 1295 typ := &b.Func.Config.Types 1296 // match: (Hmul64u x y) 1297 // result: (Select0 (MULVU x y)) 1298 for { 1299 x := v_0 1300 y := v_1 1301 v.reset(OpSelect0) 1302 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 1303 v0.AddArg2(x, y) 1304 v.AddArg(v0) 1305 return true 1306 } 1307 } 1308 func rewriteValueLOONG64_OpIsInBounds(v *Value) bool { 1309 v_1 := v.Args[1] 1310 v_0 := v.Args[0] 1311 // match: (IsInBounds idx len) 1312 // result: (SGTU len idx) 1313 for { 1314 idx := v_0 1315 len := v_1 1316 v.reset(OpLOONG64SGTU) 1317 v.AddArg2(len, idx) 1318 return true 1319 } 1320 } 1321 func rewriteValueLOONG64_OpIsNonNil(v *Value) bool { 1322 v_0 := v.Args[0] 1323 b := v.Block 1324 typ := &b.Func.Config.Types 1325 // match: (IsNonNil ptr) 1326 // result: (SGTU ptr (MOVVconst [0])) 1327 for { 1328 ptr := v_0 1329 v.reset(OpLOONG64SGTU) 1330 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1331 v0.AuxInt = int64ToAuxInt(0) 1332 v.AddArg2(ptr, v0) 1333 return true 1334 } 1335 } 1336 func rewriteValueLOONG64_OpIsSliceInBounds(v *Value) bool { 1337 v_1 := v.Args[1] 1338 v_0 := v.Args[0] 1339 b := v.Block 1340 typ := &b.Func.Config.Types 1341 // match: (IsSliceInBounds idx len) 1342 // result: (XOR (MOVVconst [1]) (SGTU idx len)) 1343 for { 1344 idx := v_0 1345 len := v_1 1346 v.reset(OpLOONG64XOR) 1347 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 1348 v0.AuxInt = int64ToAuxInt(1) 1349 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 1350 v1.AddArg2(idx, len) 1351 v.AddArg2(v0, v1) 1352 return true 1353 } 1354 } 1355 func rewriteValueLOONG64_OpLOONG64ADDV(v *Value) bool { 1356 v_1 := v.Args[1] 1357 v_0 := v.Args[0] 1358 // match: (ADDV x (MOVVconst [c])) 1359 // cond: is32Bit(c) 1360 // result: (ADDVconst [c] x) 1361 for { 1362 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1363 x := v_0 1364 if v_1.Op != OpLOONG64MOVVconst { 1365 continue 1366 } 1367 c := auxIntToInt64(v_1.AuxInt) 1368 if !(is32Bit(c)) { 1369 continue 1370 } 1371 v.reset(OpLOONG64ADDVconst) 1372 v.AuxInt = int64ToAuxInt(c) 1373 v.AddArg(x) 1374 return true 1375 } 1376 break 1377 } 1378 // match: (ADDV x (NEGV y)) 1379 // result: (SUBV x y) 1380 for { 1381 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1382 x := v_0 1383 if v_1.Op != OpLOONG64NEGV { 1384 continue 1385 } 1386 y := 
v_1.Args[0] 1387 v.reset(OpLOONG64SUBV) 1388 v.AddArg2(x, y) 1389 return true 1390 } 1391 break 1392 } 1393 return false 1394 } 1395 func rewriteValueLOONG64_OpLOONG64ADDVconst(v *Value) bool { 1396 v_0 := v.Args[0] 1397 // match: (ADDVconst [off1] (MOVVaddr [off2] {sym} ptr)) 1398 // cond: is32Bit(off1+int64(off2)) 1399 // result: (MOVVaddr [int32(off1)+int32(off2)] {sym} ptr) 1400 for { 1401 off1 := auxIntToInt64(v.AuxInt) 1402 if v_0.Op != OpLOONG64MOVVaddr { 1403 break 1404 } 1405 off2 := auxIntToInt32(v_0.AuxInt) 1406 sym := auxToSym(v_0.Aux) 1407 ptr := v_0.Args[0] 1408 if !(is32Bit(off1 + int64(off2))) { 1409 break 1410 } 1411 v.reset(OpLOONG64MOVVaddr) 1412 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2)) 1413 v.Aux = symToAux(sym) 1414 v.AddArg(ptr) 1415 return true 1416 } 1417 // match: (ADDVconst [0] x) 1418 // result: x 1419 for { 1420 if auxIntToInt64(v.AuxInt) != 0 { 1421 break 1422 } 1423 x := v_0 1424 v.copyOf(x) 1425 return true 1426 } 1427 // match: (ADDVconst [c] (MOVVconst [d])) 1428 // result: (MOVVconst [c+d]) 1429 for { 1430 c := auxIntToInt64(v.AuxInt) 1431 if v_0.Op != OpLOONG64MOVVconst { 1432 break 1433 } 1434 d := auxIntToInt64(v_0.AuxInt) 1435 v.reset(OpLOONG64MOVVconst) 1436 v.AuxInt = int64ToAuxInt(c + d) 1437 return true 1438 } 1439 // match: (ADDVconst [c] (ADDVconst [d] x)) 1440 // cond: is32Bit(c+d) 1441 // result: (ADDVconst [c+d] x) 1442 for { 1443 c := auxIntToInt64(v.AuxInt) 1444 if v_0.Op != OpLOONG64ADDVconst { 1445 break 1446 } 1447 d := auxIntToInt64(v_0.AuxInt) 1448 x := v_0.Args[0] 1449 if !(is32Bit(c + d)) { 1450 break 1451 } 1452 v.reset(OpLOONG64ADDVconst) 1453 v.AuxInt = int64ToAuxInt(c + d) 1454 v.AddArg(x) 1455 return true 1456 } 1457 // match: (ADDVconst [c] (SUBVconst [d] x)) 1458 // cond: is32Bit(c-d) 1459 // result: (ADDVconst [c-d] x) 1460 for { 1461 c := auxIntToInt64(v.AuxInt) 1462 if v_0.Op != OpLOONG64SUBVconst { 1463 break 1464 } 1465 d := auxIntToInt64(v_0.AuxInt) 1466 x := v_0.Args[0] 1467 if !(is32Bit(c - d)) { 1468 break 1469 } 1470 v.reset(OpLOONG64ADDVconst) 1471 v.AuxInt = int64ToAuxInt(c - d) 1472 v.AddArg(x) 1473 return true 1474 } 1475 return false 1476 } 1477 func rewriteValueLOONG64_OpLOONG64AND(v *Value) bool { 1478 v_1 := v.Args[1] 1479 v_0 := v.Args[0] 1480 // match: (AND x (MOVVconst [c])) 1481 // cond: is32Bit(c) 1482 // result: (ANDconst [c] x) 1483 for { 1484 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 1485 x := v_0 1486 if v_1.Op != OpLOONG64MOVVconst { 1487 continue 1488 } 1489 c := auxIntToInt64(v_1.AuxInt) 1490 if !(is32Bit(c)) { 1491 continue 1492 } 1493 v.reset(OpLOONG64ANDconst) 1494 v.AuxInt = int64ToAuxInt(c) 1495 v.AddArg(x) 1496 return true 1497 } 1498 break 1499 } 1500 // match: (AND x x) 1501 // result: x 1502 for { 1503 x := v_0 1504 if x != v_1 { 1505 break 1506 } 1507 v.copyOf(x) 1508 return true 1509 } 1510 return false 1511 } 1512 func rewriteValueLOONG64_OpLOONG64ANDconst(v *Value) bool { 1513 v_0 := v.Args[0] 1514 // match: (ANDconst [0] _) 1515 // result: (MOVVconst [0]) 1516 for { 1517 if auxIntToInt64(v.AuxInt) != 0 { 1518 break 1519 } 1520 v.reset(OpLOONG64MOVVconst) 1521 v.AuxInt = int64ToAuxInt(0) 1522 return true 1523 } 1524 // match: (ANDconst [-1] x) 1525 // result: x 1526 for { 1527 if auxIntToInt64(v.AuxInt) != -1 { 1528 break 1529 } 1530 x := v_0 1531 v.copyOf(x) 1532 return true 1533 } 1534 // match: (ANDconst [c] (MOVVconst [d])) 1535 // result: (MOVVconst [c&d]) 1536 for { 1537 c := auxIntToInt64(v.AuxInt) 1538 if v_0.Op != OpLOONG64MOVVconst { 1539 break 1540 
} 1541 d := auxIntToInt64(v_0.AuxInt) 1542 v.reset(OpLOONG64MOVVconst) 1543 v.AuxInt = int64ToAuxInt(c & d) 1544 return true 1545 } 1546 // match: (ANDconst [c] (ANDconst [d] x)) 1547 // result: (ANDconst [c&d] x) 1548 for { 1549 c := auxIntToInt64(v.AuxInt) 1550 if v_0.Op != OpLOONG64ANDconst { 1551 break 1552 } 1553 d := auxIntToInt64(v_0.AuxInt) 1554 x := v_0.Args[0] 1555 v.reset(OpLOONG64ANDconst) 1556 v.AuxInt = int64ToAuxInt(c & d) 1557 v.AddArg(x) 1558 return true 1559 } 1560 return false 1561 } 1562 func rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd32(v *Value) bool { 1563 v_2 := v.Args[2] 1564 v_1 := v.Args[1] 1565 v_0 := v.Args[0] 1566 // match: (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem) 1567 // cond: is32Bit(c) 1568 // result: (LoweredAtomicAddconst32 [int32(c)] ptr mem) 1569 for { 1570 ptr := v_0 1571 if v_1.Op != OpLOONG64MOVVconst { 1572 break 1573 } 1574 c := auxIntToInt64(v_1.AuxInt) 1575 mem := v_2 1576 if !(is32Bit(c)) { 1577 break 1578 } 1579 v.reset(OpLOONG64LoweredAtomicAddconst32) 1580 v.AuxInt = int32ToAuxInt(int32(c)) 1581 v.AddArg2(ptr, mem) 1582 return true 1583 } 1584 return false 1585 } 1586 func rewriteValueLOONG64_OpLOONG64LoweredAtomicAdd64(v *Value) bool { 1587 v_2 := v.Args[2] 1588 v_1 := v.Args[1] 1589 v_0 := v.Args[0] 1590 // match: (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem) 1591 // cond: is32Bit(c) 1592 // result: (LoweredAtomicAddconst64 [c] ptr mem) 1593 for { 1594 ptr := v_0 1595 if v_1.Op != OpLOONG64MOVVconst { 1596 break 1597 } 1598 c := auxIntToInt64(v_1.AuxInt) 1599 mem := v_2 1600 if !(is32Bit(c)) { 1601 break 1602 } 1603 v.reset(OpLOONG64LoweredAtomicAddconst64) 1604 v.AuxInt = int64ToAuxInt(c) 1605 v.AddArg2(ptr, mem) 1606 return true 1607 } 1608 return false 1609 } 1610 func rewriteValueLOONG64_OpLOONG64LoweredAtomicStore32(v *Value) bool { 1611 v_2 := v.Args[2] 1612 v_1 := v.Args[1] 1613 v_0 := v.Args[0] 1614 // match: (LoweredAtomicStore32 ptr (MOVVconst [0]) mem) 1615 // result: (LoweredAtomicStorezero32 ptr mem) 1616 for { 1617 ptr := v_0 1618 if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 { 1619 break 1620 } 1621 mem := v_2 1622 v.reset(OpLOONG64LoweredAtomicStorezero32) 1623 v.AddArg2(ptr, mem) 1624 return true 1625 } 1626 return false 1627 } 1628 func rewriteValueLOONG64_OpLOONG64LoweredAtomicStore64(v *Value) bool { 1629 v_2 := v.Args[2] 1630 v_1 := v.Args[1] 1631 v_0 := v.Args[0] 1632 // match: (LoweredAtomicStore64 ptr (MOVVconst [0]) mem) 1633 // result: (LoweredAtomicStorezero64 ptr mem) 1634 for { 1635 ptr := v_0 1636 if v_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 { 1637 break 1638 } 1639 mem := v_2 1640 v.reset(OpLOONG64LoweredAtomicStorezero64) 1641 v.AddArg2(ptr, mem) 1642 return true 1643 } 1644 return false 1645 } 1646 func rewriteValueLOONG64_OpLOONG64MASKEQZ(v *Value) bool { 1647 v_0 := v.Args[0] 1648 // match: (MASKEQZ (MOVVconst [0]) cond) 1649 // result: (MOVVconst [0]) 1650 for { 1651 if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 { 1652 break 1653 } 1654 v.reset(OpLOONG64MOVVconst) 1655 v.AuxInt = int64ToAuxInt(0) 1656 return true 1657 } 1658 return false 1659 } 1660 func rewriteValueLOONG64_OpLOONG64MASKNEZ(v *Value) bool { 1661 v_0 := v.Args[0] 1662 // match: (MASKNEZ (MOVVconst [0]) cond) 1663 // result: (MOVVconst [0]) 1664 for { 1665 if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 { 1666 break 1667 } 1668 v.reset(OpLOONG64MOVVconst) 1669 v.AuxInt = int64ToAuxInt(0) 1670 return true 1671 } 1672 return false 1673 } 1674 func 
rewriteValueLOONG64_OpLOONG64MOVBUload(v *Value) bool { 1675 v_1 := v.Args[1] 1676 v_0 := v.Args[0] 1677 // match: (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem) 1678 // cond: is32Bit(int64(off1)+off2) 1679 // result: (MOVBUload [off1+int32(off2)] {sym} ptr mem) 1680 for { 1681 off1 := auxIntToInt32(v.AuxInt) 1682 sym := auxToSym(v.Aux) 1683 if v_0.Op != OpLOONG64ADDVconst { 1684 break 1685 } 1686 off2 := auxIntToInt64(v_0.AuxInt) 1687 ptr := v_0.Args[0] 1688 mem := v_1 1689 if !(is32Bit(int64(off1) + off2)) { 1690 break 1691 } 1692 v.reset(OpLOONG64MOVBUload) 1693 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1694 v.Aux = symToAux(sym) 1695 v.AddArg2(ptr, mem) 1696 return true 1697 } 1698 // match: (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 1699 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 1700 // result: (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 1701 for { 1702 off1 := auxIntToInt32(v.AuxInt) 1703 sym1 := auxToSym(v.Aux) 1704 if v_0.Op != OpLOONG64MOVVaddr { 1705 break 1706 } 1707 off2 := auxIntToInt32(v_0.AuxInt) 1708 sym2 := auxToSym(v_0.Aux) 1709 ptr := v_0.Args[0] 1710 mem := v_1 1711 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 1712 break 1713 } 1714 v.reset(OpLOONG64MOVBUload) 1715 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1716 v.Aux = symToAux(mergeSym(sym1, sym2)) 1717 v.AddArg2(ptr, mem) 1718 return true 1719 } 1720 return false 1721 } 1722 func rewriteValueLOONG64_OpLOONG64MOVBUreg(v *Value) bool { 1723 v_0 := v.Args[0] 1724 // match: (MOVBUreg x:(MOVBUload _ _)) 1725 // result: (MOVVreg x) 1726 for { 1727 x := v_0 1728 if x.Op != OpLOONG64MOVBUload { 1729 break 1730 } 1731 v.reset(OpLOONG64MOVVreg) 1732 v.AddArg(x) 1733 return true 1734 } 1735 // match: (MOVBUreg x:(MOVBUreg _)) 1736 // result: (MOVVreg x) 1737 for { 1738 x := v_0 1739 if x.Op != OpLOONG64MOVBUreg { 1740 break 1741 } 1742 v.reset(OpLOONG64MOVVreg) 1743 v.AddArg(x) 1744 return true 1745 } 1746 // match: (MOVBUreg (MOVVconst [c])) 1747 // result: (MOVVconst [int64(uint8(c))]) 1748 for { 1749 if v_0.Op != OpLOONG64MOVVconst { 1750 break 1751 } 1752 c := auxIntToInt64(v_0.AuxInt) 1753 v.reset(OpLOONG64MOVVconst) 1754 v.AuxInt = int64ToAuxInt(int64(uint8(c))) 1755 return true 1756 } 1757 return false 1758 } 1759 func rewriteValueLOONG64_OpLOONG64MOVBload(v *Value) bool { 1760 v_1 := v.Args[1] 1761 v_0 := v.Args[0] 1762 // match: (MOVBload [off1] {sym} (ADDVconst [off2] ptr) mem) 1763 // cond: is32Bit(int64(off1)+off2) 1764 // result: (MOVBload [off1+int32(off2)] {sym} ptr mem) 1765 for { 1766 off1 := auxIntToInt32(v.AuxInt) 1767 sym := auxToSym(v.Aux) 1768 if v_0.Op != OpLOONG64ADDVconst { 1769 break 1770 } 1771 off2 := auxIntToInt64(v_0.AuxInt) 1772 ptr := v_0.Args[0] 1773 mem := v_1 1774 if !(is32Bit(int64(off1) + off2)) { 1775 break 1776 } 1777 v.reset(OpLOONG64MOVBload) 1778 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1779 v.Aux = symToAux(sym) 1780 v.AddArg2(ptr, mem) 1781 return true 1782 } 1783 // match: (MOVBload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 1784 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 1785 // result: (MOVBload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 1786 for { 1787 off1 := auxIntToInt32(v.AuxInt) 1788 sym1 := auxToSym(v.Aux) 1789 if v_0.Op != OpLOONG64MOVVaddr { 1790 break 1791 } 1792 off2 := auxIntToInt32(v_0.AuxInt) 1793 sym2 := auxToSym(v_0.Aux) 1794 ptr := v_0.Args[0] 1795 mem := v_1 1796 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 
1797 break 1798 } 1799 v.reset(OpLOONG64MOVBload) 1800 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1801 v.Aux = symToAux(mergeSym(sym1, sym2)) 1802 v.AddArg2(ptr, mem) 1803 return true 1804 } 1805 return false 1806 } 1807 func rewriteValueLOONG64_OpLOONG64MOVBreg(v *Value) bool { 1808 v_0 := v.Args[0] 1809 // match: (MOVBreg x:(MOVBload _ _)) 1810 // result: (MOVVreg x) 1811 for { 1812 x := v_0 1813 if x.Op != OpLOONG64MOVBload { 1814 break 1815 } 1816 v.reset(OpLOONG64MOVVreg) 1817 v.AddArg(x) 1818 return true 1819 } 1820 // match: (MOVBreg x:(MOVBreg _)) 1821 // result: (MOVVreg x) 1822 for { 1823 x := v_0 1824 if x.Op != OpLOONG64MOVBreg { 1825 break 1826 } 1827 v.reset(OpLOONG64MOVVreg) 1828 v.AddArg(x) 1829 return true 1830 } 1831 // match: (MOVBreg (MOVVconst [c])) 1832 // result: (MOVVconst [int64(int8(c))]) 1833 for { 1834 if v_0.Op != OpLOONG64MOVVconst { 1835 break 1836 } 1837 c := auxIntToInt64(v_0.AuxInt) 1838 v.reset(OpLOONG64MOVVconst) 1839 v.AuxInt = int64ToAuxInt(int64(int8(c))) 1840 return true 1841 } 1842 return false 1843 } 1844 func rewriteValueLOONG64_OpLOONG64MOVBstore(v *Value) bool { 1845 v_2 := v.Args[2] 1846 v_1 := v.Args[1] 1847 v_0 := v.Args[0] 1848 // match: (MOVBstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 1849 // cond: is32Bit(int64(off1)+off2) 1850 // result: (MOVBstore [off1+int32(off2)] {sym} ptr val mem) 1851 for { 1852 off1 := auxIntToInt32(v.AuxInt) 1853 sym := auxToSym(v.Aux) 1854 if v_0.Op != OpLOONG64ADDVconst { 1855 break 1856 } 1857 off2 := auxIntToInt64(v_0.AuxInt) 1858 ptr := v_0.Args[0] 1859 val := v_1 1860 mem := v_2 1861 if !(is32Bit(int64(off1) + off2)) { 1862 break 1863 } 1864 v.reset(OpLOONG64MOVBstore) 1865 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1866 v.Aux = symToAux(sym) 1867 v.AddArg3(ptr, val, mem) 1868 return true 1869 } 1870 // match: (MOVBstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 1871 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 1872 // result: (MOVBstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 1873 for { 1874 off1 := auxIntToInt32(v.AuxInt) 1875 sym1 := auxToSym(v.Aux) 1876 if v_0.Op != OpLOONG64MOVVaddr { 1877 break 1878 } 1879 off2 := auxIntToInt32(v_0.AuxInt) 1880 sym2 := auxToSym(v_0.Aux) 1881 ptr := v_0.Args[0] 1882 val := v_1 1883 mem := v_2 1884 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 1885 break 1886 } 1887 v.reset(OpLOONG64MOVBstore) 1888 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 1889 v.Aux = symToAux(mergeSym(sym1, sym2)) 1890 v.AddArg3(ptr, val, mem) 1891 return true 1892 } 1893 // match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem) 1894 // result: (MOVBstore [off] {sym} ptr x mem) 1895 for { 1896 off := auxIntToInt32(v.AuxInt) 1897 sym := auxToSym(v.Aux) 1898 ptr := v_0 1899 if v_1.Op != OpLOONG64MOVBreg { 1900 break 1901 } 1902 x := v_1.Args[0] 1903 mem := v_2 1904 v.reset(OpLOONG64MOVBstore) 1905 v.AuxInt = int32ToAuxInt(off) 1906 v.Aux = symToAux(sym) 1907 v.AddArg3(ptr, x, mem) 1908 return true 1909 } 1910 // match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem) 1911 // result: (MOVBstore [off] {sym} ptr x mem) 1912 for { 1913 off := auxIntToInt32(v.AuxInt) 1914 sym := auxToSym(v.Aux) 1915 ptr := v_0 1916 if v_1.Op != OpLOONG64MOVBUreg { 1917 break 1918 } 1919 x := v_1.Args[0] 1920 mem := v_2 1921 v.reset(OpLOONG64MOVBstore) 1922 v.AuxInt = int32ToAuxInt(off) 1923 v.Aux = symToAux(sym) 1924 v.AddArg3(ptr, x, mem) 1925 return true 1926 } 1927 // match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem) 1928 // result: (MOVBstore 
[off] {sym} ptr x mem) 1929 for { 1930 off := auxIntToInt32(v.AuxInt) 1931 sym := auxToSym(v.Aux) 1932 ptr := v_0 1933 if v_1.Op != OpLOONG64MOVHreg { 1934 break 1935 } 1936 x := v_1.Args[0] 1937 mem := v_2 1938 v.reset(OpLOONG64MOVBstore) 1939 v.AuxInt = int32ToAuxInt(off) 1940 v.Aux = symToAux(sym) 1941 v.AddArg3(ptr, x, mem) 1942 return true 1943 } 1944 // match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem) 1945 // result: (MOVBstore [off] {sym} ptr x mem) 1946 for { 1947 off := auxIntToInt32(v.AuxInt) 1948 sym := auxToSym(v.Aux) 1949 ptr := v_0 1950 if v_1.Op != OpLOONG64MOVHUreg { 1951 break 1952 } 1953 x := v_1.Args[0] 1954 mem := v_2 1955 v.reset(OpLOONG64MOVBstore) 1956 v.AuxInt = int32ToAuxInt(off) 1957 v.Aux = symToAux(sym) 1958 v.AddArg3(ptr, x, mem) 1959 return true 1960 } 1961 // match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem) 1962 // result: (MOVBstore [off] {sym} ptr x mem) 1963 for { 1964 off := auxIntToInt32(v.AuxInt) 1965 sym := auxToSym(v.Aux) 1966 ptr := v_0 1967 if v_1.Op != OpLOONG64MOVWreg { 1968 break 1969 } 1970 x := v_1.Args[0] 1971 mem := v_2 1972 v.reset(OpLOONG64MOVBstore) 1973 v.AuxInt = int32ToAuxInt(off) 1974 v.Aux = symToAux(sym) 1975 v.AddArg3(ptr, x, mem) 1976 return true 1977 } 1978 // match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem) 1979 // result: (MOVBstore [off] {sym} ptr x mem) 1980 for { 1981 off := auxIntToInt32(v.AuxInt) 1982 sym := auxToSym(v.Aux) 1983 ptr := v_0 1984 if v_1.Op != OpLOONG64MOVWUreg { 1985 break 1986 } 1987 x := v_1.Args[0] 1988 mem := v_2 1989 v.reset(OpLOONG64MOVBstore) 1990 v.AuxInt = int32ToAuxInt(off) 1991 v.Aux = symToAux(sym) 1992 v.AddArg3(ptr, x, mem) 1993 return true 1994 } 1995 return false 1996 } 1997 func rewriteValueLOONG64_OpLOONG64MOVBstorezero(v *Value) bool { 1998 v_1 := v.Args[1] 1999 v_0 := v.Args[0] 2000 // match: (MOVBstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 2001 // cond: is32Bit(int64(off1)+off2) 2002 // result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem) 2003 for { 2004 off1 := auxIntToInt32(v.AuxInt) 2005 sym := auxToSym(v.Aux) 2006 if v_0.Op != OpLOONG64ADDVconst { 2007 break 2008 } 2009 off2 := auxIntToInt64(v_0.AuxInt) 2010 ptr := v_0.Args[0] 2011 mem := v_1 2012 if !(is32Bit(int64(off1) + off2)) { 2013 break 2014 } 2015 v.reset(OpLOONG64MOVBstorezero) 2016 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2017 v.Aux = symToAux(sym) 2018 v.AddArg2(ptr, mem) 2019 return true 2020 } 2021 // match: (MOVBstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2022 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2023 // result: (MOVBstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2024 for { 2025 off1 := auxIntToInt32(v.AuxInt) 2026 sym1 := auxToSym(v.Aux) 2027 if v_0.Op != OpLOONG64MOVVaddr { 2028 break 2029 } 2030 off2 := auxIntToInt32(v_0.AuxInt) 2031 sym2 := auxToSym(v_0.Aux) 2032 ptr := v_0.Args[0] 2033 mem := v_1 2034 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2035 break 2036 } 2037 v.reset(OpLOONG64MOVBstorezero) 2038 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2039 v.Aux = symToAux(mergeSym(sym1, sym2)) 2040 v.AddArg2(ptr, mem) 2041 return true 2042 } 2043 return false 2044 } 2045 func rewriteValueLOONG64_OpLOONG64MOVDload(v *Value) bool { 2046 v_1 := v.Args[1] 2047 v_0 := v.Args[0] 2048 // match: (MOVDload [off1] {sym} (ADDVconst [off2] ptr) mem) 2049 // cond: is32Bit(int64(off1)+off2) 2050 // result: (MOVDload [off1+int32(off2)] {sym} ptr mem) 2051 for { 2052 off1 := auxIntToInt32(v.AuxInt) 2053 sym := auxToSym(v.Aux) 2054 
if v_0.Op != OpLOONG64ADDVconst { 2055 break 2056 } 2057 off2 := auxIntToInt64(v_0.AuxInt) 2058 ptr := v_0.Args[0] 2059 mem := v_1 2060 if !(is32Bit(int64(off1) + off2)) { 2061 break 2062 } 2063 v.reset(OpLOONG64MOVDload) 2064 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2065 v.Aux = symToAux(sym) 2066 v.AddArg2(ptr, mem) 2067 return true 2068 } 2069 // match: (MOVDload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2070 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2071 // result: (MOVDload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2072 for { 2073 off1 := auxIntToInt32(v.AuxInt) 2074 sym1 := auxToSym(v.Aux) 2075 if v_0.Op != OpLOONG64MOVVaddr { 2076 break 2077 } 2078 off2 := auxIntToInt32(v_0.AuxInt) 2079 sym2 := auxToSym(v_0.Aux) 2080 ptr := v_0.Args[0] 2081 mem := v_1 2082 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2083 break 2084 } 2085 v.reset(OpLOONG64MOVDload) 2086 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2087 v.Aux = symToAux(mergeSym(sym1, sym2)) 2088 v.AddArg2(ptr, mem) 2089 return true 2090 } 2091 return false 2092 } 2093 func rewriteValueLOONG64_OpLOONG64MOVDstore(v *Value) bool { 2094 v_2 := v.Args[2] 2095 v_1 := v.Args[1] 2096 v_0 := v.Args[0] 2097 // match: (MOVDstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 2098 // cond: is32Bit(int64(off1)+off2) 2099 // result: (MOVDstore [off1+int32(off2)] {sym} ptr val mem) 2100 for { 2101 off1 := auxIntToInt32(v.AuxInt) 2102 sym := auxToSym(v.Aux) 2103 if v_0.Op != OpLOONG64ADDVconst { 2104 break 2105 } 2106 off2 := auxIntToInt64(v_0.AuxInt) 2107 ptr := v_0.Args[0] 2108 val := v_1 2109 mem := v_2 2110 if !(is32Bit(int64(off1) + off2)) { 2111 break 2112 } 2113 v.reset(OpLOONG64MOVDstore) 2114 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2115 v.Aux = symToAux(sym) 2116 v.AddArg3(ptr, val, mem) 2117 return true 2118 } 2119 // match: (MOVDstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 2120 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2121 // result: (MOVDstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 2122 for { 2123 off1 := auxIntToInt32(v.AuxInt) 2124 sym1 := auxToSym(v.Aux) 2125 if v_0.Op != OpLOONG64MOVVaddr { 2126 break 2127 } 2128 off2 := auxIntToInt32(v_0.AuxInt) 2129 sym2 := auxToSym(v_0.Aux) 2130 ptr := v_0.Args[0] 2131 val := v_1 2132 mem := v_2 2133 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2134 break 2135 } 2136 v.reset(OpLOONG64MOVDstore) 2137 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2138 v.Aux = symToAux(mergeSym(sym1, sym2)) 2139 v.AddArg3(ptr, val, mem) 2140 return true 2141 } 2142 return false 2143 } 2144 func rewriteValueLOONG64_OpLOONG64MOVFload(v *Value) bool { 2145 v_1 := v.Args[1] 2146 v_0 := v.Args[0] 2147 // match: (MOVFload [off1] {sym} (ADDVconst [off2] ptr) mem) 2148 // cond: is32Bit(int64(off1)+off2) 2149 // result: (MOVFload [off1+int32(off2)] {sym} ptr mem) 2150 for { 2151 off1 := auxIntToInt32(v.AuxInt) 2152 sym := auxToSym(v.Aux) 2153 if v_0.Op != OpLOONG64ADDVconst { 2154 break 2155 } 2156 off2 := auxIntToInt64(v_0.AuxInt) 2157 ptr := v_0.Args[0] 2158 mem := v_1 2159 if !(is32Bit(int64(off1) + off2)) { 2160 break 2161 } 2162 v.reset(OpLOONG64MOVFload) 2163 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2164 v.Aux = symToAux(sym) 2165 v.AddArg2(ptr, mem) 2166 return true 2167 } 2168 // match: (MOVFload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2169 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2170 // result: (MOVFload [off1+int32(off2)] 
{mergeSym(sym1,sym2)} ptr mem) 2171 for { 2172 off1 := auxIntToInt32(v.AuxInt) 2173 sym1 := auxToSym(v.Aux) 2174 if v_0.Op != OpLOONG64MOVVaddr { 2175 break 2176 } 2177 off2 := auxIntToInt32(v_0.AuxInt) 2178 sym2 := auxToSym(v_0.Aux) 2179 ptr := v_0.Args[0] 2180 mem := v_1 2181 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2182 break 2183 } 2184 v.reset(OpLOONG64MOVFload) 2185 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2186 v.Aux = symToAux(mergeSym(sym1, sym2)) 2187 v.AddArg2(ptr, mem) 2188 return true 2189 } 2190 return false 2191 } 2192 func rewriteValueLOONG64_OpLOONG64MOVFstore(v *Value) bool { 2193 v_2 := v.Args[2] 2194 v_1 := v.Args[1] 2195 v_0 := v.Args[0] 2196 // match: (MOVFstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 2197 // cond: is32Bit(int64(off1)+off2) 2198 // result: (MOVFstore [off1+int32(off2)] {sym} ptr val mem) 2199 for { 2200 off1 := auxIntToInt32(v.AuxInt) 2201 sym := auxToSym(v.Aux) 2202 if v_0.Op != OpLOONG64ADDVconst { 2203 break 2204 } 2205 off2 := auxIntToInt64(v_0.AuxInt) 2206 ptr := v_0.Args[0] 2207 val := v_1 2208 mem := v_2 2209 if !(is32Bit(int64(off1) + off2)) { 2210 break 2211 } 2212 v.reset(OpLOONG64MOVFstore) 2213 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2214 v.Aux = symToAux(sym) 2215 v.AddArg3(ptr, val, mem) 2216 return true 2217 } 2218 // match: (MOVFstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 2219 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2220 // result: (MOVFstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 2221 for { 2222 off1 := auxIntToInt32(v.AuxInt) 2223 sym1 := auxToSym(v.Aux) 2224 if v_0.Op != OpLOONG64MOVVaddr { 2225 break 2226 } 2227 off2 := auxIntToInt32(v_0.AuxInt) 2228 sym2 := auxToSym(v_0.Aux) 2229 ptr := v_0.Args[0] 2230 val := v_1 2231 mem := v_2 2232 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2233 break 2234 } 2235 v.reset(OpLOONG64MOVFstore) 2236 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2237 v.Aux = symToAux(mergeSym(sym1, sym2)) 2238 v.AddArg3(ptr, val, mem) 2239 return true 2240 } 2241 return false 2242 } 2243 func rewriteValueLOONG64_OpLOONG64MOVHUload(v *Value) bool { 2244 v_1 := v.Args[1] 2245 v_0 := v.Args[0] 2246 // match: (MOVHUload [off1] {sym} (ADDVconst [off2] ptr) mem) 2247 // cond: is32Bit(int64(off1)+off2) 2248 // result: (MOVHUload [off1+int32(off2)] {sym} ptr mem) 2249 for { 2250 off1 := auxIntToInt32(v.AuxInt) 2251 sym := auxToSym(v.Aux) 2252 if v_0.Op != OpLOONG64ADDVconst { 2253 break 2254 } 2255 off2 := auxIntToInt64(v_0.AuxInt) 2256 ptr := v_0.Args[0] 2257 mem := v_1 2258 if !(is32Bit(int64(off1) + off2)) { 2259 break 2260 } 2261 v.reset(OpLOONG64MOVHUload) 2262 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2263 v.Aux = symToAux(sym) 2264 v.AddArg2(ptr, mem) 2265 return true 2266 } 2267 // match: (MOVHUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2268 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2269 // result: (MOVHUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2270 for { 2271 off1 := auxIntToInt32(v.AuxInt) 2272 sym1 := auxToSym(v.Aux) 2273 if v_0.Op != OpLOONG64MOVVaddr { 2274 break 2275 } 2276 off2 := auxIntToInt32(v_0.AuxInt) 2277 sym2 := auxToSym(v_0.Aux) 2278 ptr := v_0.Args[0] 2279 mem := v_1 2280 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2281 break 2282 } 2283 v.reset(OpLOONG64MOVHUload) 2284 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2285 v.Aux = symToAux(mergeSym(sym1, sym2)) 2286 v.AddArg2(ptr, mem) 2287 return true 2288 } 
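// The two rules above fold a constant offset (ADDVconst) or a static address
// (MOVVaddr) into the load's AuxInt/Aux; if neither matched, fall through and
// keep the MOVHUload unchanged.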
2289 return false 2290 } 2291 func rewriteValueLOONG64_OpLOONG64MOVHUreg(v *Value) bool { 2292 v_0 := v.Args[0] 2293 // match: (MOVHUreg x:(MOVBUload _ _)) 2294 // result: (MOVVreg x) 2295 for { 2296 x := v_0 2297 if x.Op != OpLOONG64MOVBUload { 2298 break 2299 } 2300 v.reset(OpLOONG64MOVVreg) 2301 v.AddArg(x) 2302 return true 2303 } 2304 // match: (MOVHUreg x:(MOVHUload _ _)) 2305 // result: (MOVVreg x) 2306 for { 2307 x := v_0 2308 if x.Op != OpLOONG64MOVHUload { 2309 break 2310 } 2311 v.reset(OpLOONG64MOVVreg) 2312 v.AddArg(x) 2313 return true 2314 } 2315 // match: (MOVHUreg x:(MOVBUreg _)) 2316 // result: (MOVVreg x) 2317 for { 2318 x := v_0 2319 if x.Op != OpLOONG64MOVBUreg { 2320 break 2321 } 2322 v.reset(OpLOONG64MOVVreg) 2323 v.AddArg(x) 2324 return true 2325 } 2326 // match: (MOVHUreg x:(MOVHUreg _)) 2327 // result: (MOVVreg x) 2328 for { 2329 x := v_0 2330 if x.Op != OpLOONG64MOVHUreg { 2331 break 2332 } 2333 v.reset(OpLOONG64MOVVreg) 2334 v.AddArg(x) 2335 return true 2336 } 2337 // match: (MOVHUreg (MOVVconst [c])) 2338 // result: (MOVVconst [int64(uint16(c))]) 2339 for { 2340 if v_0.Op != OpLOONG64MOVVconst { 2341 break 2342 } 2343 c := auxIntToInt64(v_0.AuxInt) 2344 v.reset(OpLOONG64MOVVconst) 2345 v.AuxInt = int64ToAuxInt(int64(uint16(c))) 2346 return true 2347 } 2348 return false 2349 } 2350 func rewriteValueLOONG64_OpLOONG64MOVHload(v *Value) bool { 2351 v_1 := v.Args[1] 2352 v_0 := v.Args[0] 2353 // match: (MOVHload [off1] {sym} (ADDVconst [off2] ptr) mem) 2354 // cond: is32Bit(int64(off1)+off2) 2355 // result: (MOVHload [off1+int32(off2)] {sym} ptr mem) 2356 for { 2357 off1 := auxIntToInt32(v.AuxInt) 2358 sym := auxToSym(v.Aux) 2359 if v_0.Op != OpLOONG64ADDVconst { 2360 break 2361 } 2362 off2 := auxIntToInt64(v_0.AuxInt) 2363 ptr := v_0.Args[0] 2364 mem := v_1 2365 if !(is32Bit(int64(off1) + off2)) { 2366 break 2367 } 2368 v.reset(OpLOONG64MOVHload) 2369 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2370 v.Aux = symToAux(sym) 2371 v.AddArg2(ptr, mem) 2372 return true 2373 } 2374 // match: (MOVHload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2375 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2376 // result: (MOVHload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2377 for { 2378 off1 := auxIntToInt32(v.AuxInt) 2379 sym1 := auxToSym(v.Aux) 2380 if v_0.Op != OpLOONG64MOVVaddr { 2381 break 2382 } 2383 off2 := auxIntToInt32(v_0.AuxInt) 2384 sym2 := auxToSym(v_0.Aux) 2385 ptr := v_0.Args[0] 2386 mem := v_1 2387 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2388 break 2389 } 2390 v.reset(OpLOONG64MOVHload) 2391 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2392 v.Aux = symToAux(mergeSym(sym1, sym2)) 2393 v.AddArg2(ptr, mem) 2394 return true 2395 } 2396 return false 2397 } 2398 func rewriteValueLOONG64_OpLOONG64MOVHreg(v *Value) bool { 2399 v_0 := v.Args[0] 2400 // match: (MOVHreg x:(MOVBload _ _)) 2401 // result: (MOVVreg x) 2402 for { 2403 x := v_0 2404 if x.Op != OpLOONG64MOVBload { 2405 break 2406 } 2407 v.reset(OpLOONG64MOVVreg) 2408 v.AddArg(x) 2409 return true 2410 } 2411 // match: (MOVHreg x:(MOVBUload _ _)) 2412 // result: (MOVVreg x) 2413 for { 2414 x := v_0 2415 if x.Op != OpLOONG64MOVBUload { 2416 break 2417 } 2418 v.reset(OpLOONG64MOVVreg) 2419 v.AddArg(x) 2420 return true 2421 } 2422 // match: (MOVHreg x:(MOVHload _ _)) 2423 // result: (MOVVreg x) 2424 for { 2425 x := v_0 2426 if x.Op != OpLOONG64MOVHload { 2427 break 2428 } 2429 v.reset(OpLOONG64MOVVreg) 2430 v.AddArg(x) 2431 return true 2432 } 2433 // match: 
(MOVHreg x:(MOVBreg _)) 2434 // result: (MOVVreg x) 2435 for { 2436 x := v_0 2437 if x.Op != OpLOONG64MOVBreg { 2438 break 2439 } 2440 v.reset(OpLOONG64MOVVreg) 2441 v.AddArg(x) 2442 return true 2443 } 2444 // match: (MOVHreg x:(MOVBUreg _)) 2445 // result: (MOVVreg x) 2446 for { 2447 x := v_0 2448 if x.Op != OpLOONG64MOVBUreg { 2449 break 2450 } 2451 v.reset(OpLOONG64MOVVreg) 2452 v.AddArg(x) 2453 return true 2454 } 2455 // match: (MOVHreg x:(MOVHreg _)) 2456 // result: (MOVVreg x) 2457 for { 2458 x := v_0 2459 if x.Op != OpLOONG64MOVHreg { 2460 break 2461 } 2462 v.reset(OpLOONG64MOVVreg) 2463 v.AddArg(x) 2464 return true 2465 } 2466 // match: (MOVHreg (MOVVconst [c])) 2467 // result: (MOVVconst [int64(int16(c))]) 2468 for { 2469 if v_0.Op != OpLOONG64MOVVconst { 2470 break 2471 } 2472 c := auxIntToInt64(v_0.AuxInt) 2473 v.reset(OpLOONG64MOVVconst) 2474 v.AuxInt = int64ToAuxInt(int64(int16(c))) 2475 return true 2476 } 2477 return false 2478 } 2479 func rewriteValueLOONG64_OpLOONG64MOVHstore(v *Value) bool { 2480 v_2 := v.Args[2] 2481 v_1 := v.Args[1] 2482 v_0 := v.Args[0] 2483 // match: (MOVHstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 2484 // cond: is32Bit(int64(off1)+off2) 2485 // result: (MOVHstore [off1+int32(off2)] {sym} ptr val mem) 2486 for { 2487 off1 := auxIntToInt32(v.AuxInt) 2488 sym := auxToSym(v.Aux) 2489 if v_0.Op != OpLOONG64ADDVconst { 2490 break 2491 } 2492 off2 := auxIntToInt64(v_0.AuxInt) 2493 ptr := v_0.Args[0] 2494 val := v_1 2495 mem := v_2 2496 if !(is32Bit(int64(off1) + off2)) { 2497 break 2498 } 2499 v.reset(OpLOONG64MOVHstore) 2500 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2501 v.Aux = symToAux(sym) 2502 v.AddArg3(ptr, val, mem) 2503 return true 2504 } 2505 // match: (MOVHstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 2506 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2507 // result: (MOVHstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 2508 for { 2509 off1 := auxIntToInt32(v.AuxInt) 2510 sym1 := auxToSym(v.Aux) 2511 if v_0.Op != OpLOONG64MOVVaddr { 2512 break 2513 } 2514 off2 := auxIntToInt32(v_0.AuxInt) 2515 sym2 := auxToSym(v_0.Aux) 2516 ptr := v_0.Args[0] 2517 val := v_1 2518 mem := v_2 2519 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2520 break 2521 } 2522 v.reset(OpLOONG64MOVHstore) 2523 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2524 v.Aux = symToAux(mergeSym(sym1, sym2)) 2525 v.AddArg3(ptr, val, mem) 2526 return true 2527 } 2528 // match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem) 2529 // result: (MOVHstore [off] {sym} ptr x mem) 2530 for { 2531 off := auxIntToInt32(v.AuxInt) 2532 sym := auxToSym(v.Aux) 2533 ptr := v_0 2534 if v_1.Op != OpLOONG64MOVHreg { 2535 break 2536 } 2537 x := v_1.Args[0] 2538 mem := v_2 2539 v.reset(OpLOONG64MOVHstore) 2540 v.AuxInt = int32ToAuxInt(off) 2541 v.Aux = symToAux(sym) 2542 v.AddArg3(ptr, x, mem) 2543 return true 2544 } 2545 // match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem) 2546 // result: (MOVHstore [off] {sym} ptr x mem) 2547 for { 2548 off := auxIntToInt32(v.AuxInt) 2549 sym := auxToSym(v.Aux) 2550 ptr := v_0 2551 if v_1.Op != OpLOONG64MOVHUreg { 2552 break 2553 } 2554 x := v_1.Args[0] 2555 mem := v_2 2556 v.reset(OpLOONG64MOVHstore) 2557 v.AuxInt = int32ToAuxInt(off) 2558 v.Aux = symToAux(sym) 2559 v.AddArg3(ptr, x, mem) 2560 return true 2561 } 2562 // match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem) 2563 // result: (MOVHstore [off] {sym} ptr x mem) 2564 for { 2565 off := auxIntToInt32(v.AuxInt) 2566 sym := auxToSym(v.Aux) 2567 ptr 
:= v_0 2568 if v_1.Op != OpLOONG64MOVWreg { 2569 break 2570 } 2571 x := v_1.Args[0] 2572 mem := v_2 2573 v.reset(OpLOONG64MOVHstore) 2574 v.AuxInt = int32ToAuxInt(off) 2575 v.Aux = symToAux(sym) 2576 v.AddArg3(ptr, x, mem) 2577 return true 2578 } 2579 // match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem) 2580 // result: (MOVHstore [off] {sym} ptr x mem) 2581 for { 2582 off := auxIntToInt32(v.AuxInt) 2583 sym := auxToSym(v.Aux) 2584 ptr := v_0 2585 if v_1.Op != OpLOONG64MOVWUreg { 2586 break 2587 } 2588 x := v_1.Args[0] 2589 mem := v_2 2590 v.reset(OpLOONG64MOVHstore) 2591 v.AuxInt = int32ToAuxInt(off) 2592 v.Aux = symToAux(sym) 2593 v.AddArg3(ptr, x, mem) 2594 return true 2595 } 2596 return false 2597 } 2598 func rewriteValueLOONG64_OpLOONG64MOVHstorezero(v *Value) bool { 2599 v_1 := v.Args[1] 2600 v_0 := v.Args[0] 2601 // match: (MOVHstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 2602 // cond: is32Bit(int64(off1)+off2) 2603 // result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem) 2604 for { 2605 off1 := auxIntToInt32(v.AuxInt) 2606 sym := auxToSym(v.Aux) 2607 if v_0.Op != OpLOONG64ADDVconst { 2608 break 2609 } 2610 off2 := auxIntToInt64(v_0.AuxInt) 2611 ptr := v_0.Args[0] 2612 mem := v_1 2613 if !(is32Bit(int64(off1) + off2)) { 2614 break 2615 } 2616 v.reset(OpLOONG64MOVHstorezero) 2617 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2618 v.Aux = symToAux(sym) 2619 v.AddArg2(ptr, mem) 2620 return true 2621 } 2622 // match: (MOVHstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2623 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2624 // result: (MOVHstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2625 for { 2626 off1 := auxIntToInt32(v.AuxInt) 2627 sym1 := auxToSym(v.Aux) 2628 if v_0.Op != OpLOONG64MOVVaddr { 2629 break 2630 } 2631 off2 := auxIntToInt32(v_0.AuxInt) 2632 sym2 := auxToSym(v_0.Aux) 2633 ptr := v_0.Args[0] 2634 mem := v_1 2635 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2636 break 2637 } 2638 v.reset(OpLOONG64MOVHstorezero) 2639 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2640 v.Aux = symToAux(mergeSym(sym1, sym2)) 2641 v.AddArg2(ptr, mem) 2642 return true 2643 } 2644 return false 2645 } 2646 func rewriteValueLOONG64_OpLOONG64MOVVload(v *Value) bool { 2647 v_1 := v.Args[1] 2648 v_0 := v.Args[0] 2649 // match: (MOVVload [off1] {sym} (ADDVconst [off2] ptr) mem) 2650 // cond: is32Bit(int64(off1)+off2) 2651 // result: (MOVVload [off1+int32(off2)] {sym} ptr mem) 2652 for { 2653 off1 := auxIntToInt32(v.AuxInt) 2654 sym := auxToSym(v.Aux) 2655 if v_0.Op != OpLOONG64ADDVconst { 2656 break 2657 } 2658 off2 := auxIntToInt64(v_0.AuxInt) 2659 ptr := v_0.Args[0] 2660 mem := v_1 2661 if !(is32Bit(int64(off1) + off2)) { 2662 break 2663 } 2664 v.reset(OpLOONG64MOVVload) 2665 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2666 v.Aux = symToAux(sym) 2667 v.AddArg2(ptr, mem) 2668 return true 2669 } 2670 // match: (MOVVload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2671 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2672 // result: (MOVVload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2673 for { 2674 off1 := auxIntToInt32(v.AuxInt) 2675 sym1 := auxToSym(v.Aux) 2676 if v_0.Op != OpLOONG64MOVVaddr { 2677 break 2678 } 2679 off2 := auxIntToInt32(v_0.AuxInt) 2680 sym2 := auxToSym(v_0.Aux) 2681 ptr := v_0.Args[0] 2682 mem := v_1 2683 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2684 break 2685 } 2686 v.reset(OpLOONG64MOVVload) 2687 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 
2688 v.Aux = symToAux(mergeSym(sym1, sym2)) 2689 v.AddArg2(ptr, mem) 2690 return true 2691 } 2692 return false 2693 } 2694 func rewriteValueLOONG64_OpLOONG64MOVVreg(v *Value) bool { 2695 v_0 := v.Args[0] 2696 // match: (MOVVreg x) 2697 // cond: x.Uses == 1 2698 // result: (MOVVnop x) 2699 for { 2700 x := v_0 2701 if !(x.Uses == 1) { 2702 break 2703 } 2704 v.reset(OpLOONG64MOVVnop) 2705 v.AddArg(x) 2706 return true 2707 } 2708 // match: (MOVVreg (MOVVconst [c])) 2709 // result: (MOVVconst [c]) 2710 for { 2711 if v_0.Op != OpLOONG64MOVVconst { 2712 break 2713 } 2714 c := auxIntToInt64(v_0.AuxInt) 2715 v.reset(OpLOONG64MOVVconst) 2716 v.AuxInt = int64ToAuxInt(c) 2717 return true 2718 } 2719 return false 2720 } 2721 func rewriteValueLOONG64_OpLOONG64MOVVstore(v *Value) bool { 2722 v_2 := v.Args[2] 2723 v_1 := v.Args[1] 2724 v_0 := v.Args[0] 2725 // match: (MOVVstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 2726 // cond: is32Bit(int64(off1)+off2) 2727 // result: (MOVVstore [off1+int32(off2)] {sym} ptr val mem) 2728 for { 2729 off1 := auxIntToInt32(v.AuxInt) 2730 sym := auxToSym(v.Aux) 2731 if v_0.Op != OpLOONG64ADDVconst { 2732 break 2733 } 2734 off2 := auxIntToInt64(v_0.AuxInt) 2735 ptr := v_0.Args[0] 2736 val := v_1 2737 mem := v_2 2738 if !(is32Bit(int64(off1) + off2)) { 2739 break 2740 } 2741 v.reset(OpLOONG64MOVVstore) 2742 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2743 v.Aux = symToAux(sym) 2744 v.AddArg3(ptr, val, mem) 2745 return true 2746 } 2747 // match: (MOVVstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 2748 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2749 // result: (MOVVstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 2750 for { 2751 off1 := auxIntToInt32(v.AuxInt) 2752 sym1 := auxToSym(v.Aux) 2753 if v_0.Op != OpLOONG64MOVVaddr { 2754 break 2755 } 2756 off2 := auxIntToInt32(v_0.AuxInt) 2757 sym2 := auxToSym(v_0.Aux) 2758 ptr := v_0.Args[0] 2759 val := v_1 2760 mem := v_2 2761 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2762 break 2763 } 2764 v.reset(OpLOONG64MOVVstore) 2765 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2766 v.Aux = symToAux(mergeSym(sym1, sym2)) 2767 v.AddArg3(ptr, val, mem) 2768 return true 2769 } 2770 return false 2771 } 2772 func rewriteValueLOONG64_OpLOONG64MOVVstorezero(v *Value) bool { 2773 v_1 := v.Args[1] 2774 v_0 := v.Args[0] 2775 // match: (MOVVstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 2776 // cond: is32Bit(int64(off1)+off2) 2777 // result: (MOVVstorezero [off1+int32(off2)] {sym} ptr mem) 2778 for { 2779 off1 := auxIntToInt32(v.AuxInt) 2780 sym := auxToSym(v.Aux) 2781 if v_0.Op != OpLOONG64ADDVconst { 2782 break 2783 } 2784 off2 := auxIntToInt64(v_0.AuxInt) 2785 ptr := v_0.Args[0] 2786 mem := v_1 2787 if !(is32Bit(int64(off1) + off2)) { 2788 break 2789 } 2790 v.reset(OpLOONG64MOVVstorezero) 2791 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2792 v.Aux = symToAux(sym) 2793 v.AddArg2(ptr, mem) 2794 return true 2795 } 2796 // match: (MOVVstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2797 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2798 // result: (MOVVstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2799 for { 2800 off1 := auxIntToInt32(v.AuxInt) 2801 sym1 := auxToSym(v.Aux) 2802 if v_0.Op != OpLOONG64MOVVaddr { 2803 break 2804 } 2805 off2 := auxIntToInt32(v_0.AuxInt) 2806 sym2 := auxToSym(v_0.Aux) 2807 ptr := v_0.Args[0] 2808 mem := v_1 2809 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2810 break 2811 } 
2812 v.reset(OpLOONG64MOVVstorezero) 2813 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2814 v.Aux = symToAux(mergeSym(sym1, sym2)) 2815 v.AddArg2(ptr, mem) 2816 return true 2817 } 2818 return false 2819 } 2820 func rewriteValueLOONG64_OpLOONG64MOVWUload(v *Value) bool { 2821 v_1 := v.Args[1] 2822 v_0 := v.Args[0] 2823 // match: (MOVWUload [off1] {sym} (ADDVconst [off2] ptr) mem) 2824 // cond: is32Bit(int64(off1)+off2) 2825 // result: (MOVWUload [off1+int32(off2)] {sym} ptr mem) 2826 for { 2827 off1 := auxIntToInt32(v.AuxInt) 2828 sym := auxToSym(v.Aux) 2829 if v_0.Op != OpLOONG64ADDVconst { 2830 break 2831 } 2832 off2 := auxIntToInt64(v_0.AuxInt) 2833 ptr := v_0.Args[0] 2834 mem := v_1 2835 if !(is32Bit(int64(off1) + off2)) { 2836 break 2837 } 2838 v.reset(OpLOONG64MOVWUload) 2839 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2840 v.Aux = symToAux(sym) 2841 v.AddArg2(ptr, mem) 2842 return true 2843 } 2844 // match: (MOVWUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2845 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2846 // result: (MOVWUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2847 for { 2848 off1 := auxIntToInt32(v.AuxInt) 2849 sym1 := auxToSym(v.Aux) 2850 if v_0.Op != OpLOONG64MOVVaddr { 2851 break 2852 } 2853 off2 := auxIntToInt32(v_0.AuxInt) 2854 sym2 := auxToSym(v_0.Aux) 2855 ptr := v_0.Args[0] 2856 mem := v_1 2857 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2858 break 2859 } 2860 v.reset(OpLOONG64MOVWUload) 2861 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2862 v.Aux = symToAux(mergeSym(sym1, sym2)) 2863 v.AddArg2(ptr, mem) 2864 return true 2865 } 2866 return false 2867 } 2868 func rewriteValueLOONG64_OpLOONG64MOVWUreg(v *Value) bool { 2869 v_0 := v.Args[0] 2870 // match: (MOVWUreg x:(MOVBUload _ _)) 2871 // result: (MOVVreg x) 2872 for { 2873 x := v_0 2874 if x.Op != OpLOONG64MOVBUload { 2875 break 2876 } 2877 v.reset(OpLOONG64MOVVreg) 2878 v.AddArg(x) 2879 return true 2880 } 2881 // match: (MOVWUreg x:(MOVHUload _ _)) 2882 // result: (MOVVreg x) 2883 for { 2884 x := v_0 2885 if x.Op != OpLOONG64MOVHUload { 2886 break 2887 } 2888 v.reset(OpLOONG64MOVVreg) 2889 v.AddArg(x) 2890 return true 2891 } 2892 // match: (MOVWUreg x:(MOVWUload _ _)) 2893 // result: (MOVVreg x) 2894 for { 2895 x := v_0 2896 if x.Op != OpLOONG64MOVWUload { 2897 break 2898 } 2899 v.reset(OpLOONG64MOVVreg) 2900 v.AddArg(x) 2901 return true 2902 } 2903 // match: (MOVWUreg x:(MOVBUreg _)) 2904 // result: (MOVVreg x) 2905 for { 2906 x := v_0 2907 if x.Op != OpLOONG64MOVBUreg { 2908 break 2909 } 2910 v.reset(OpLOONG64MOVVreg) 2911 v.AddArg(x) 2912 return true 2913 } 2914 // match: (MOVWUreg x:(MOVHUreg _)) 2915 // result: (MOVVreg x) 2916 for { 2917 x := v_0 2918 if x.Op != OpLOONG64MOVHUreg { 2919 break 2920 } 2921 v.reset(OpLOONG64MOVVreg) 2922 v.AddArg(x) 2923 return true 2924 } 2925 // match: (MOVWUreg x:(MOVWUreg _)) 2926 // result: (MOVVreg x) 2927 for { 2928 x := v_0 2929 if x.Op != OpLOONG64MOVWUreg { 2930 break 2931 } 2932 v.reset(OpLOONG64MOVVreg) 2933 v.AddArg(x) 2934 return true 2935 } 2936 // match: (MOVWUreg (MOVVconst [c])) 2937 // result: (MOVVconst [int64(uint32(c))]) 2938 for { 2939 if v_0.Op != OpLOONG64MOVVconst { 2940 break 2941 } 2942 c := auxIntToInt64(v_0.AuxInt) 2943 v.reset(OpLOONG64MOVVconst) 2944 v.AuxInt = int64ToAuxInt(int64(uint32(c))) 2945 return true 2946 } 2947 return false 2948 } 2949 func rewriteValueLOONG64_OpLOONG64MOVWload(v *Value) bool { 2950 v_1 := v.Args[1] 2951 v_0 := v.Args[0] 2952 // match: (MOVWload [off1] 
{sym} (ADDVconst [off2] ptr) mem) 2953 // cond: is32Bit(int64(off1)+off2) 2954 // result: (MOVWload [off1+int32(off2)] {sym} ptr mem) 2955 for { 2956 off1 := auxIntToInt32(v.AuxInt) 2957 sym := auxToSym(v.Aux) 2958 if v_0.Op != OpLOONG64ADDVconst { 2959 break 2960 } 2961 off2 := auxIntToInt64(v_0.AuxInt) 2962 ptr := v_0.Args[0] 2963 mem := v_1 2964 if !(is32Bit(int64(off1) + off2)) { 2965 break 2966 } 2967 v.reset(OpLOONG64MOVWload) 2968 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2969 v.Aux = symToAux(sym) 2970 v.AddArg2(ptr, mem) 2971 return true 2972 } 2973 // match: (MOVWload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 2974 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 2975 // result: (MOVWload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 2976 for { 2977 off1 := auxIntToInt32(v.AuxInt) 2978 sym1 := auxToSym(v.Aux) 2979 if v_0.Op != OpLOONG64MOVVaddr { 2980 break 2981 } 2982 off2 := auxIntToInt32(v_0.AuxInt) 2983 sym2 := auxToSym(v_0.Aux) 2984 ptr := v_0.Args[0] 2985 mem := v_1 2986 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 2987 break 2988 } 2989 v.reset(OpLOONG64MOVWload) 2990 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 2991 v.Aux = symToAux(mergeSym(sym1, sym2)) 2992 v.AddArg2(ptr, mem) 2993 return true 2994 } 2995 return false 2996 } 2997 func rewriteValueLOONG64_OpLOONG64MOVWreg(v *Value) bool { 2998 v_0 := v.Args[0] 2999 // match: (MOVWreg x:(MOVBload _ _)) 3000 // result: (MOVVreg x) 3001 for { 3002 x := v_0 3003 if x.Op != OpLOONG64MOVBload { 3004 break 3005 } 3006 v.reset(OpLOONG64MOVVreg) 3007 v.AddArg(x) 3008 return true 3009 } 3010 // match: (MOVWreg x:(MOVBUload _ _)) 3011 // result: (MOVVreg x) 3012 for { 3013 x := v_0 3014 if x.Op != OpLOONG64MOVBUload { 3015 break 3016 } 3017 v.reset(OpLOONG64MOVVreg) 3018 v.AddArg(x) 3019 return true 3020 } 3021 // match: (MOVWreg x:(MOVHload _ _)) 3022 // result: (MOVVreg x) 3023 for { 3024 x := v_0 3025 if x.Op != OpLOONG64MOVHload { 3026 break 3027 } 3028 v.reset(OpLOONG64MOVVreg) 3029 v.AddArg(x) 3030 return true 3031 } 3032 // match: (MOVWreg x:(MOVHUload _ _)) 3033 // result: (MOVVreg x) 3034 for { 3035 x := v_0 3036 if x.Op != OpLOONG64MOVHUload { 3037 break 3038 } 3039 v.reset(OpLOONG64MOVVreg) 3040 v.AddArg(x) 3041 return true 3042 } 3043 // match: (MOVWreg x:(MOVWload _ _)) 3044 // result: (MOVVreg x) 3045 for { 3046 x := v_0 3047 if x.Op != OpLOONG64MOVWload { 3048 break 3049 } 3050 v.reset(OpLOONG64MOVVreg) 3051 v.AddArg(x) 3052 return true 3053 } 3054 // match: (MOVWreg x:(MOVBreg _)) 3055 // result: (MOVVreg x) 3056 for { 3057 x := v_0 3058 if x.Op != OpLOONG64MOVBreg { 3059 break 3060 } 3061 v.reset(OpLOONG64MOVVreg) 3062 v.AddArg(x) 3063 return true 3064 } 3065 // match: (MOVWreg x:(MOVBUreg _)) 3066 // result: (MOVVreg x) 3067 for { 3068 x := v_0 3069 if x.Op != OpLOONG64MOVBUreg { 3070 break 3071 } 3072 v.reset(OpLOONG64MOVVreg) 3073 v.AddArg(x) 3074 return true 3075 } 3076 // match: (MOVWreg x:(MOVHreg _)) 3077 // result: (MOVVreg x) 3078 for { 3079 x := v_0 3080 if x.Op != OpLOONG64MOVHreg { 3081 break 3082 } 3083 v.reset(OpLOONG64MOVVreg) 3084 v.AddArg(x) 3085 return true 3086 } 3087 // match: (MOVWreg x:(MOVWreg _)) 3088 // result: (MOVVreg x) 3089 for { 3090 x := v_0 3091 if x.Op != OpLOONG64MOVWreg { 3092 break 3093 } 3094 v.reset(OpLOONG64MOVVreg) 3095 v.AddArg(x) 3096 return true 3097 } 3098 // match: (MOVWreg (MOVVconst [c])) 3099 // result: (MOVVconst [int64(int32(c))]) 3100 for { 3101 if v_0.Op != OpLOONG64MOVVconst { 3102 break 3103 } 3104 c := 
auxIntToInt64(v_0.AuxInt) 3105 v.reset(OpLOONG64MOVVconst) 3106 v.AuxInt = int64ToAuxInt(int64(int32(c))) 3107 return true 3108 } 3109 return false 3110 } 3111 func rewriteValueLOONG64_OpLOONG64MOVWstore(v *Value) bool { 3112 v_2 := v.Args[2] 3113 v_1 := v.Args[1] 3114 v_0 := v.Args[0] 3115 // match: (MOVWstore [off1] {sym} (ADDVconst [off2] ptr) val mem) 3116 // cond: is32Bit(int64(off1)+off2) 3117 // result: (MOVWstore [off1+int32(off2)] {sym} ptr val mem) 3118 for { 3119 off1 := auxIntToInt32(v.AuxInt) 3120 sym := auxToSym(v.Aux) 3121 if v_0.Op != OpLOONG64ADDVconst { 3122 break 3123 } 3124 off2 := auxIntToInt64(v_0.AuxInt) 3125 ptr := v_0.Args[0] 3126 val := v_1 3127 mem := v_2 3128 if !(is32Bit(int64(off1) + off2)) { 3129 break 3130 } 3131 v.reset(OpLOONG64MOVWstore) 3132 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 3133 v.Aux = symToAux(sym) 3134 v.AddArg3(ptr, val, mem) 3135 return true 3136 } 3137 // match: (MOVWstore [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) val mem) 3138 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 3139 // result: (MOVWstore [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr val mem) 3140 for { 3141 off1 := auxIntToInt32(v.AuxInt) 3142 sym1 := auxToSym(v.Aux) 3143 if v_0.Op != OpLOONG64MOVVaddr { 3144 break 3145 } 3146 off2 := auxIntToInt32(v_0.AuxInt) 3147 sym2 := auxToSym(v_0.Aux) 3148 ptr := v_0.Args[0] 3149 val := v_1 3150 mem := v_2 3151 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 3152 break 3153 } 3154 v.reset(OpLOONG64MOVWstore) 3155 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 3156 v.Aux = symToAux(mergeSym(sym1, sym2)) 3157 v.AddArg3(ptr, val, mem) 3158 return true 3159 } 3160 // match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem) 3161 // result: (MOVWstore [off] {sym} ptr x mem) 3162 for { 3163 off := auxIntToInt32(v.AuxInt) 3164 sym := auxToSym(v.Aux) 3165 ptr := v_0 3166 if v_1.Op != OpLOONG64MOVWreg { 3167 break 3168 } 3169 x := v_1.Args[0] 3170 mem := v_2 3171 v.reset(OpLOONG64MOVWstore) 3172 v.AuxInt = int32ToAuxInt(off) 3173 v.Aux = symToAux(sym) 3174 v.AddArg3(ptr, x, mem) 3175 return true 3176 } 3177 // match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem) 3178 // result: (MOVWstore [off] {sym} ptr x mem) 3179 for { 3180 off := auxIntToInt32(v.AuxInt) 3181 sym := auxToSym(v.Aux) 3182 ptr := v_0 3183 if v_1.Op != OpLOONG64MOVWUreg { 3184 break 3185 } 3186 x := v_1.Args[0] 3187 mem := v_2 3188 v.reset(OpLOONG64MOVWstore) 3189 v.AuxInt = int32ToAuxInt(off) 3190 v.Aux = symToAux(sym) 3191 v.AddArg3(ptr, x, mem) 3192 return true 3193 } 3194 return false 3195 } 3196 func rewriteValueLOONG64_OpLOONG64MOVWstorezero(v *Value) bool { 3197 v_1 := v.Args[1] 3198 v_0 := v.Args[0] 3199 // match: (MOVWstorezero [off1] {sym} (ADDVconst [off2] ptr) mem) 3200 // cond: is32Bit(int64(off1)+off2) 3201 // result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem) 3202 for { 3203 off1 := auxIntToInt32(v.AuxInt) 3204 sym := auxToSym(v.Aux) 3205 if v_0.Op != OpLOONG64ADDVconst { 3206 break 3207 } 3208 off2 := auxIntToInt64(v_0.AuxInt) 3209 ptr := v_0.Args[0] 3210 mem := v_1 3211 if !(is32Bit(int64(off1) + off2)) { 3212 break 3213 } 3214 v.reset(OpLOONG64MOVWstorezero) 3215 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 3216 v.Aux = symToAux(sym) 3217 v.AddArg2(ptr, mem) 3218 return true 3219 } 3220 // match: (MOVWstorezero [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) 3221 // cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) 3222 // result: (MOVWstorezero [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem) 3223 for { 
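// Fold a MOVVaddr base into the zeroing store: merge the two symbols and add
// the offsets, provided the symbols are mergeable and the combined offset
// still fits in 32 bits.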
3224 off1 := auxIntToInt32(v.AuxInt) 3225 sym1 := auxToSym(v.Aux) 3226 if v_0.Op != OpLOONG64MOVVaddr { 3227 break 3228 } 3229 off2 := auxIntToInt32(v_0.AuxInt) 3230 sym2 := auxToSym(v_0.Aux) 3231 ptr := v_0.Args[0] 3232 mem := v_1 3233 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) { 3234 break 3235 } 3236 v.reset(OpLOONG64MOVWstorezero) 3237 v.AuxInt = int32ToAuxInt(off1 + int32(off2)) 3238 v.Aux = symToAux(mergeSym(sym1, sym2)) 3239 v.AddArg2(ptr, mem) 3240 return true 3241 } 3242 return false 3243 } 3244 func rewriteValueLOONG64_OpLOONG64NEGV(v *Value) bool { 3245 v_0 := v.Args[0] 3246 // match: (NEGV (MOVVconst [c])) 3247 // result: (MOVVconst [-c]) 3248 for { 3249 if v_0.Op != OpLOONG64MOVVconst { 3250 break 3251 } 3252 c := auxIntToInt64(v_0.AuxInt) 3253 v.reset(OpLOONG64MOVVconst) 3254 v.AuxInt = int64ToAuxInt(-c) 3255 return true 3256 } 3257 return false 3258 } 3259 func rewriteValueLOONG64_OpLOONG64NOR(v *Value) bool { 3260 v_1 := v.Args[1] 3261 v_0 := v.Args[0] 3262 // match: (NOR x (MOVVconst [c])) 3263 // cond: is32Bit(c) 3264 // result: (NORconst [c] x) 3265 for { 3266 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3267 x := v_0 3268 if v_1.Op != OpLOONG64MOVVconst { 3269 continue 3270 } 3271 c := auxIntToInt64(v_1.AuxInt) 3272 if !(is32Bit(c)) { 3273 continue 3274 } 3275 v.reset(OpLOONG64NORconst) 3276 v.AuxInt = int64ToAuxInt(c) 3277 v.AddArg(x) 3278 return true 3279 } 3280 break 3281 } 3282 return false 3283 } 3284 func rewriteValueLOONG64_OpLOONG64NORconst(v *Value) bool { 3285 v_0 := v.Args[0] 3286 // match: (NORconst [c] (MOVVconst [d])) 3287 // result: (MOVVconst [^(c|d)]) 3288 for { 3289 c := auxIntToInt64(v.AuxInt) 3290 if v_0.Op != OpLOONG64MOVVconst { 3291 break 3292 } 3293 d := auxIntToInt64(v_0.AuxInt) 3294 v.reset(OpLOONG64MOVVconst) 3295 v.AuxInt = int64ToAuxInt(^(c | d)) 3296 return true 3297 } 3298 return false 3299 } 3300 func rewriteValueLOONG64_OpLOONG64OR(v *Value) bool { 3301 v_1 := v.Args[1] 3302 v_0 := v.Args[0] 3303 // match: (OR x (MOVVconst [c])) 3304 // cond: is32Bit(c) 3305 // result: (ORconst [c] x) 3306 for { 3307 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 3308 x := v_0 3309 if v_1.Op != OpLOONG64MOVVconst { 3310 continue 3311 } 3312 c := auxIntToInt64(v_1.AuxInt) 3313 if !(is32Bit(c)) { 3314 continue 3315 } 3316 v.reset(OpLOONG64ORconst) 3317 v.AuxInt = int64ToAuxInt(c) 3318 v.AddArg(x) 3319 return true 3320 } 3321 break 3322 } 3323 // match: (OR x x) 3324 // result: x 3325 for { 3326 x := v_0 3327 if x != v_1 { 3328 break 3329 } 3330 v.copyOf(x) 3331 return true 3332 } 3333 return false 3334 } 3335 func rewriteValueLOONG64_OpLOONG64ORconst(v *Value) bool { 3336 v_0 := v.Args[0] 3337 // match: (ORconst [0] x) 3338 // result: x 3339 for { 3340 if auxIntToInt64(v.AuxInt) != 0 { 3341 break 3342 } 3343 x := v_0 3344 v.copyOf(x) 3345 return true 3346 } 3347 // match: (ORconst [-1] _) 3348 // result: (MOVVconst [-1]) 3349 for { 3350 if auxIntToInt64(v.AuxInt) != -1 { 3351 break 3352 } 3353 v.reset(OpLOONG64MOVVconst) 3354 v.AuxInt = int64ToAuxInt(-1) 3355 return true 3356 } 3357 // match: (ORconst [c] (MOVVconst [d])) 3358 // result: (MOVVconst [c|d]) 3359 for { 3360 c := auxIntToInt64(v.AuxInt) 3361 if v_0.Op != OpLOONG64MOVVconst { 3362 break 3363 } 3364 d := auxIntToInt64(v_0.AuxInt) 3365 v.reset(OpLOONG64MOVVconst) 3366 v.AuxInt = int64ToAuxInt(c | d) 3367 return true 3368 } 3369 // match: (ORconst [c] (ORconst [d] x)) 3370 // cond: is32Bit(c|d) 3371 // result: (ORconst [c|d] x) 3372 for { 3373 c := 
auxIntToInt64(v.AuxInt) 3374 if v_0.Op != OpLOONG64ORconst { 3375 break 3376 } 3377 d := auxIntToInt64(v_0.AuxInt) 3378 x := v_0.Args[0] 3379 if !(is32Bit(c | d)) { 3380 break 3381 } 3382 v.reset(OpLOONG64ORconst) 3383 v.AuxInt = int64ToAuxInt(c | d) 3384 v.AddArg(x) 3385 return true 3386 } 3387 return false 3388 } 3389 func rewriteValueLOONG64_OpLOONG64ROTR(v *Value) bool { 3390 v_1 := v.Args[1] 3391 v_0 := v.Args[0] 3392 // match: (ROTR x (MOVVconst [c])) 3393 // result: (ROTRconst x [c&31]) 3394 for { 3395 x := v_0 3396 if v_1.Op != OpLOONG64MOVVconst { 3397 break 3398 } 3399 c := auxIntToInt64(v_1.AuxInt) 3400 v.reset(OpLOONG64ROTRconst) 3401 v.AuxInt = int64ToAuxInt(c & 31) 3402 v.AddArg(x) 3403 return true 3404 } 3405 return false 3406 } 3407 func rewriteValueLOONG64_OpLOONG64ROTRV(v *Value) bool { 3408 v_1 := v.Args[1] 3409 v_0 := v.Args[0] 3410 // match: (ROTRV x (MOVVconst [c])) 3411 // result: (ROTRVconst x [c&63]) 3412 for { 3413 x := v_0 3414 if v_1.Op != OpLOONG64MOVVconst { 3415 break 3416 } 3417 c := auxIntToInt64(v_1.AuxInt) 3418 v.reset(OpLOONG64ROTRVconst) 3419 v.AuxInt = int64ToAuxInt(c & 63) 3420 v.AddArg(x) 3421 return true 3422 } 3423 return false 3424 } 3425 func rewriteValueLOONG64_OpLOONG64SGT(v *Value) bool { 3426 v_1 := v.Args[1] 3427 v_0 := v.Args[0] 3428 // match: (SGT (MOVVconst [c]) x) 3429 // cond: is32Bit(c) 3430 // result: (SGTconst [c] x) 3431 for { 3432 if v_0.Op != OpLOONG64MOVVconst { 3433 break 3434 } 3435 c := auxIntToInt64(v_0.AuxInt) 3436 x := v_1 3437 if !(is32Bit(c)) { 3438 break 3439 } 3440 v.reset(OpLOONG64SGTconst) 3441 v.AuxInt = int64ToAuxInt(c) 3442 v.AddArg(x) 3443 return true 3444 } 3445 // match: (SGT x x) 3446 // result: (MOVVconst [0]) 3447 for { 3448 x := v_0 3449 if x != v_1 { 3450 break 3451 } 3452 v.reset(OpLOONG64MOVVconst) 3453 v.AuxInt = int64ToAuxInt(0) 3454 return true 3455 } 3456 return false 3457 } 3458 func rewriteValueLOONG64_OpLOONG64SGTU(v *Value) bool { 3459 v_1 := v.Args[1] 3460 v_0 := v.Args[0] 3461 // match: (SGTU (MOVVconst [c]) x) 3462 // cond: is32Bit(c) 3463 // result: (SGTUconst [c] x) 3464 for { 3465 if v_0.Op != OpLOONG64MOVVconst { 3466 break 3467 } 3468 c := auxIntToInt64(v_0.AuxInt) 3469 x := v_1 3470 if !(is32Bit(c)) { 3471 break 3472 } 3473 v.reset(OpLOONG64SGTUconst) 3474 v.AuxInt = int64ToAuxInt(c) 3475 v.AddArg(x) 3476 return true 3477 } 3478 // match: (SGTU x x) 3479 // result: (MOVVconst [0]) 3480 for { 3481 x := v_0 3482 if x != v_1 { 3483 break 3484 } 3485 v.reset(OpLOONG64MOVVconst) 3486 v.AuxInt = int64ToAuxInt(0) 3487 return true 3488 } 3489 return false 3490 } 3491 func rewriteValueLOONG64_OpLOONG64SGTUconst(v *Value) bool { 3492 v_0 := v.Args[0] 3493 // match: (SGTUconst [c] (MOVVconst [d])) 3494 // cond: uint64(c)>uint64(d) 3495 // result: (MOVVconst [1]) 3496 for { 3497 c := auxIntToInt64(v.AuxInt) 3498 if v_0.Op != OpLOONG64MOVVconst { 3499 break 3500 } 3501 d := auxIntToInt64(v_0.AuxInt) 3502 if !(uint64(c) > uint64(d)) { 3503 break 3504 } 3505 v.reset(OpLOONG64MOVVconst) 3506 v.AuxInt = int64ToAuxInt(1) 3507 return true 3508 } 3509 // match: (SGTUconst [c] (MOVVconst [d])) 3510 // cond: uint64(c)<=uint64(d) 3511 // result: (MOVVconst [0]) 3512 for { 3513 c := auxIntToInt64(v.AuxInt) 3514 if v_0.Op != OpLOONG64MOVVconst { 3515 break 3516 } 3517 d := auxIntToInt64(v_0.AuxInt) 3518 if !(uint64(c) <= uint64(d)) { 3519 break 3520 } 3521 v.reset(OpLOONG64MOVVconst) 3522 v.AuxInt = int64ToAuxInt(0) 3523 return true 3524 } 3525 // match: (SGTUconst [c] (MOVBUreg _)) 3526 // cond: 0xff < uint64(c) 
3527 // result: (MOVVconst [1]) 3528 for { 3529 c := auxIntToInt64(v.AuxInt) 3530 if v_0.Op != OpLOONG64MOVBUreg || !(0xff < uint64(c)) { 3531 break 3532 } 3533 v.reset(OpLOONG64MOVVconst) 3534 v.AuxInt = int64ToAuxInt(1) 3535 return true 3536 } 3537 // match: (SGTUconst [c] (MOVHUreg _)) 3538 // cond: 0xffff < uint64(c) 3539 // result: (MOVVconst [1]) 3540 for { 3541 c := auxIntToInt64(v.AuxInt) 3542 if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < uint64(c)) { 3543 break 3544 } 3545 v.reset(OpLOONG64MOVVconst) 3546 v.AuxInt = int64ToAuxInt(1) 3547 return true 3548 } 3549 // match: (SGTUconst [c] (ANDconst [m] _)) 3550 // cond: uint64(m) < uint64(c) 3551 // result: (MOVVconst [1]) 3552 for { 3553 c := auxIntToInt64(v.AuxInt) 3554 if v_0.Op != OpLOONG64ANDconst { 3555 break 3556 } 3557 m := auxIntToInt64(v_0.AuxInt) 3558 if !(uint64(m) < uint64(c)) { 3559 break 3560 } 3561 v.reset(OpLOONG64MOVVconst) 3562 v.AuxInt = int64ToAuxInt(1) 3563 return true 3564 } 3565 // match: (SGTUconst [c] (SRLVconst _ [d])) 3566 // cond: 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c) 3567 // result: (MOVVconst [1]) 3568 for { 3569 c := auxIntToInt64(v.AuxInt) 3570 if v_0.Op != OpLOONG64SRLVconst { 3571 break 3572 } 3573 d := auxIntToInt64(v_0.AuxInt) 3574 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) { 3575 break 3576 } 3577 v.reset(OpLOONG64MOVVconst) 3578 v.AuxInt = int64ToAuxInt(1) 3579 return true 3580 } 3581 return false 3582 } 3583 func rewriteValueLOONG64_OpLOONG64SGTconst(v *Value) bool { 3584 v_0 := v.Args[0] 3585 // match: (SGTconst [c] (MOVVconst [d])) 3586 // cond: c>d 3587 // result: (MOVVconst [1]) 3588 for { 3589 c := auxIntToInt64(v.AuxInt) 3590 if v_0.Op != OpLOONG64MOVVconst { 3591 break 3592 } 3593 d := auxIntToInt64(v_0.AuxInt) 3594 if !(c > d) { 3595 break 3596 } 3597 v.reset(OpLOONG64MOVVconst) 3598 v.AuxInt = int64ToAuxInt(1) 3599 return true 3600 } 3601 // match: (SGTconst [c] (MOVVconst [d])) 3602 // cond: c<=d 3603 // result: (MOVVconst [0]) 3604 for { 3605 c := auxIntToInt64(v.AuxInt) 3606 if v_0.Op != OpLOONG64MOVVconst { 3607 break 3608 } 3609 d := auxIntToInt64(v_0.AuxInt) 3610 if !(c <= d) { 3611 break 3612 } 3613 v.reset(OpLOONG64MOVVconst) 3614 v.AuxInt = int64ToAuxInt(0) 3615 return true 3616 } 3617 // match: (SGTconst [c] (MOVBreg _)) 3618 // cond: 0x7f < c 3619 // result: (MOVVconst [1]) 3620 for { 3621 c := auxIntToInt64(v.AuxInt) 3622 if v_0.Op != OpLOONG64MOVBreg || !(0x7f < c) { 3623 break 3624 } 3625 v.reset(OpLOONG64MOVVconst) 3626 v.AuxInt = int64ToAuxInt(1) 3627 return true 3628 } 3629 // match: (SGTconst [c] (MOVBreg _)) 3630 // cond: c <= -0x80 3631 // result: (MOVVconst [0]) 3632 for { 3633 c := auxIntToInt64(v.AuxInt) 3634 if v_0.Op != OpLOONG64MOVBreg || !(c <= -0x80) { 3635 break 3636 } 3637 v.reset(OpLOONG64MOVVconst) 3638 v.AuxInt = int64ToAuxInt(0) 3639 return true 3640 } 3641 // match: (SGTconst [c] (MOVBUreg _)) 3642 // cond: 0xff < c 3643 // result: (MOVVconst [1]) 3644 for { 3645 c := auxIntToInt64(v.AuxInt) 3646 if v_0.Op != OpLOONG64MOVBUreg || !(0xff < c) { 3647 break 3648 } 3649 v.reset(OpLOONG64MOVVconst) 3650 v.AuxInt = int64ToAuxInt(1) 3651 return true 3652 } 3653 // match: (SGTconst [c] (MOVBUreg _)) 3654 // cond: c < 0 3655 // result: (MOVVconst [0]) 3656 for { 3657 c := auxIntToInt64(v.AuxInt) 3658 if v_0.Op != OpLOONG64MOVBUreg || !(c < 0) { 3659 break 3660 } 3661 v.reset(OpLOONG64MOVVconst) 3662 v.AuxInt = int64ToAuxInt(0) 3663 return true 3664 } 3665 // match: (SGTconst [c] (MOVHreg _)) 3666 // cond: 0x7fff 
< c 3667 // result: (MOVVconst [1]) 3668 for { 3669 c := auxIntToInt64(v.AuxInt) 3670 if v_0.Op != OpLOONG64MOVHreg || !(0x7fff < c) { 3671 break 3672 } 3673 v.reset(OpLOONG64MOVVconst) 3674 v.AuxInt = int64ToAuxInt(1) 3675 return true 3676 } 3677 // match: (SGTconst [c] (MOVHreg _)) 3678 // cond: c <= -0x8000 3679 // result: (MOVVconst [0]) 3680 for { 3681 c := auxIntToInt64(v.AuxInt) 3682 if v_0.Op != OpLOONG64MOVHreg || !(c <= -0x8000) { 3683 break 3684 } 3685 v.reset(OpLOONG64MOVVconst) 3686 v.AuxInt = int64ToAuxInt(0) 3687 return true 3688 } 3689 // match: (SGTconst [c] (MOVHUreg _)) 3690 // cond: 0xffff < c 3691 // result: (MOVVconst [1]) 3692 for { 3693 c := auxIntToInt64(v.AuxInt) 3694 if v_0.Op != OpLOONG64MOVHUreg || !(0xffff < c) { 3695 break 3696 } 3697 v.reset(OpLOONG64MOVVconst) 3698 v.AuxInt = int64ToAuxInt(1) 3699 return true 3700 } 3701 // match: (SGTconst [c] (MOVHUreg _)) 3702 // cond: c < 0 3703 // result: (MOVVconst [0]) 3704 for { 3705 c := auxIntToInt64(v.AuxInt) 3706 if v_0.Op != OpLOONG64MOVHUreg || !(c < 0) { 3707 break 3708 } 3709 v.reset(OpLOONG64MOVVconst) 3710 v.AuxInt = int64ToAuxInt(0) 3711 return true 3712 } 3713 // match: (SGTconst [c] (MOVWUreg _)) 3714 // cond: c < 0 3715 // result: (MOVVconst [0]) 3716 for { 3717 c := auxIntToInt64(v.AuxInt) 3718 if v_0.Op != OpLOONG64MOVWUreg || !(c < 0) { 3719 break 3720 } 3721 v.reset(OpLOONG64MOVVconst) 3722 v.AuxInt = int64ToAuxInt(0) 3723 return true 3724 } 3725 // match: (SGTconst [c] (ANDconst [m] _)) 3726 // cond: 0 <= m && m < c 3727 // result: (MOVVconst [1]) 3728 for { 3729 c := auxIntToInt64(v.AuxInt) 3730 if v_0.Op != OpLOONG64ANDconst { 3731 break 3732 } 3733 m := auxIntToInt64(v_0.AuxInt) 3734 if !(0 <= m && m < c) { 3735 break 3736 } 3737 v.reset(OpLOONG64MOVVconst) 3738 v.AuxInt = int64ToAuxInt(1) 3739 return true 3740 } 3741 // match: (SGTconst [c] (SRLVconst _ [d])) 3742 // cond: 0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c) 3743 // result: (MOVVconst [1]) 3744 for { 3745 c := auxIntToInt64(v.AuxInt) 3746 if v_0.Op != OpLOONG64SRLVconst { 3747 break 3748 } 3749 d := auxIntToInt64(v_0.AuxInt) 3750 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) { 3751 break 3752 } 3753 v.reset(OpLOONG64MOVVconst) 3754 v.AuxInt = int64ToAuxInt(1) 3755 return true 3756 } 3757 return false 3758 } 3759 func rewriteValueLOONG64_OpLOONG64SLLV(v *Value) bool { 3760 v_1 := v.Args[1] 3761 v_0 := v.Args[0] 3762 // match: (SLLV _ (MOVVconst [c])) 3763 // cond: uint64(c)>=64 3764 // result: (MOVVconst [0]) 3765 for { 3766 if v_1.Op != OpLOONG64MOVVconst { 3767 break 3768 } 3769 c := auxIntToInt64(v_1.AuxInt) 3770 if !(uint64(c) >= 64) { 3771 break 3772 } 3773 v.reset(OpLOONG64MOVVconst) 3774 v.AuxInt = int64ToAuxInt(0) 3775 return true 3776 } 3777 // match: (SLLV x (MOVVconst [c])) 3778 // result: (SLLVconst x [c]) 3779 for { 3780 x := v_0 3781 if v_1.Op != OpLOONG64MOVVconst { 3782 break 3783 } 3784 c := auxIntToInt64(v_1.AuxInt) 3785 v.reset(OpLOONG64SLLVconst) 3786 v.AuxInt = int64ToAuxInt(c) 3787 v.AddArg(x) 3788 return true 3789 } 3790 return false 3791 } 3792 func rewriteValueLOONG64_OpLOONG64SLLVconst(v *Value) bool { 3793 v_0 := v.Args[0] 3794 // match: (SLLVconst [c] (MOVVconst [d])) 3795 // result: (MOVVconst [d<<uint64(c)]) 3796 for { 3797 c := auxIntToInt64(v.AuxInt) 3798 if v_0.Op != OpLOONG64MOVVconst { 3799 break 3800 } 3801 d := auxIntToInt64(v_0.AuxInt) 3802 v.reset(OpLOONG64MOVVconst) 3803 v.AuxInt = int64ToAuxInt(d << uint64(c)) 3804 return true 3805 } 
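// SLLVconst of a constant operand is folded above. The SRAV/SRLV rules that
// follow treat shift amounts of 64 or more specially (SRAVconst [63] for
// arithmetic shifts, MOVVconst [0] for logical shifts) and turn any constant
// shift amount into the corresponding *const op.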
3806 return false 3807 } 3808 func rewriteValueLOONG64_OpLOONG64SRAV(v *Value) bool { 3809 v_1 := v.Args[1] 3810 v_0 := v.Args[0] 3811 // match: (SRAV x (MOVVconst [c])) 3812 // cond: uint64(c)>=64 3813 // result: (SRAVconst x [63]) 3814 for { 3815 x := v_0 3816 if v_1.Op != OpLOONG64MOVVconst { 3817 break 3818 } 3819 c := auxIntToInt64(v_1.AuxInt) 3820 if !(uint64(c) >= 64) { 3821 break 3822 } 3823 v.reset(OpLOONG64SRAVconst) 3824 v.AuxInt = int64ToAuxInt(63) 3825 v.AddArg(x) 3826 return true 3827 } 3828 // match: (SRAV x (MOVVconst [c])) 3829 // result: (SRAVconst x [c]) 3830 for { 3831 x := v_0 3832 if v_1.Op != OpLOONG64MOVVconst { 3833 break 3834 } 3835 c := auxIntToInt64(v_1.AuxInt) 3836 v.reset(OpLOONG64SRAVconst) 3837 v.AuxInt = int64ToAuxInt(c) 3838 v.AddArg(x) 3839 return true 3840 } 3841 return false 3842 } 3843 func rewriteValueLOONG64_OpLOONG64SRAVconst(v *Value) bool { 3844 v_0 := v.Args[0] 3845 // match: (SRAVconst [c] (MOVVconst [d])) 3846 // result: (MOVVconst [d>>uint64(c)]) 3847 for { 3848 c := auxIntToInt64(v.AuxInt) 3849 if v_0.Op != OpLOONG64MOVVconst { 3850 break 3851 } 3852 d := auxIntToInt64(v_0.AuxInt) 3853 v.reset(OpLOONG64MOVVconst) 3854 v.AuxInt = int64ToAuxInt(d >> uint64(c)) 3855 return true 3856 } 3857 return false 3858 } 3859 func rewriteValueLOONG64_OpLOONG64SRLV(v *Value) bool { 3860 v_1 := v.Args[1] 3861 v_0 := v.Args[0] 3862 // match: (SRLV _ (MOVVconst [c])) 3863 // cond: uint64(c)>=64 3864 // result: (MOVVconst [0]) 3865 for { 3866 if v_1.Op != OpLOONG64MOVVconst { 3867 break 3868 } 3869 c := auxIntToInt64(v_1.AuxInt) 3870 if !(uint64(c) >= 64) { 3871 break 3872 } 3873 v.reset(OpLOONG64MOVVconst) 3874 v.AuxInt = int64ToAuxInt(0) 3875 return true 3876 } 3877 // match: (SRLV x (MOVVconst [c])) 3878 // result: (SRLVconst x [c]) 3879 for { 3880 x := v_0 3881 if v_1.Op != OpLOONG64MOVVconst { 3882 break 3883 } 3884 c := auxIntToInt64(v_1.AuxInt) 3885 v.reset(OpLOONG64SRLVconst) 3886 v.AuxInt = int64ToAuxInt(c) 3887 v.AddArg(x) 3888 return true 3889 } 3890 return false 3891 } 3892 func rewriteValueLOONG64_OpLOONG64SRLVconst(v *Value) bool { 3893 v_0 := v.Args[0] 3894 // match: (SRLVconst [c] (MOVVconst [d])) 3895 // result: (MOVVconst [int64(uint64(d)>>uint64(c))]) 3896 for { 3897 c := auxIntToInt64(v.AuxInt) 3898 if v_0.Op != OpLOONG64MOVVconst { 3899 break 3900 } 3901 d := auxIntToInt64(v_0.AuxInt) 3902 v.reset(OpLOONG64MOVVconst) 3903 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c))) 3904 return true 3905 } 3906 return false 3907 } 3908 func rewriteValueLOONG64_OpLOONG64SUBV(v *Value) bool { 3909 v_1 := v.Args[1] 3910 v_0 := v.Args[0] 3911 // match: (SUBV x (MOVVconst [c])) 3912 // cond: is32Bit(c) 3913 // result: (SUBVconst [c] x) 3914 for { 3915 x := v_0 3916 if v_1.Op != OpLOONG64MOVVconst { 3917 break 3918 } 3919 c := auxIntToInt64(v_1.AuxInt) 3920 if !(is32Bit(c)) { 3921 break 3922 } 3923 v.reset(OpLOONG64SUBVconst) 3924 v.AuxInt = int64ToAuxInt(c) 3925 v.AddArg(x) 3926 return true 3927 } 3928 // match: (SUBV x x) 3929 // result: (MOVVconst [0]) 3930 for { 3931 x := v_0 3932 if x != v_1 { 3933 break 3934 } 3935 v.reset(OpLOONG64MOVVconst) 3936 v.AuxInt = int64ToAuxInt(0) 3937 return true 3938 } 3939 // match: (SUBV (MOVVconst [0]) x) 3940 // result: (NEGV x) 3941 for { 3942 if v_0.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 { 3943 break 3944 } 3945 x := v_1 3946 v.reset(OpLOONG64NEGV) 3947 v.AddArg(x) 3948 return true 3949 } 3950 return false 3951 } 3952 func rewriteValueLOONG64_OpLOONG64SUBVconst(v *Value) bool { 3953 v_0 := 
v.Args[0] 3954 // match: (SUBVconst [0] x) 3955 // result: x 3956 for { 3957 if auxIntToInt64(v.AuxInt) != 0 { 3958 break 3959 } 3960 x := v_0 3961 v.copyOf(x) 3962 return true 3963 } 3964 // match: (SUBVconst [c] (MOVVconst [d])) 3965 // result: (MOVVconst [d-c]) 3966 for { 3967 c := auxIntToInt64(v.AuxInt) 3968 if v_0.Op != OpLOONG64MOVVconst { 3969 break 3970 } 3971 d := auxIntToInt64(v_0.AuxInt) 3972 v.reset(OpLOONG64MOVVconst) 3973 v.AuxInt = int64ToAuxInt(d - c) 3974 return true 3975 } 3976 // match: (SUBVconst [c] (SUBVconst [d] x)) 3977 // cond: is32Bit(-c-d) 3978 // result: (ADDVconst [-c-d] x) 3979 for { 3980 c := auxIntToInt64(v.AuxInt) 3981 if v_0.Op != OpLOONG64SUBVconst { 3982 break 3983 } 3984 d := auxIntToInt64(v_0.AuxInt) 3985 x := v_0.Args[0] 3986 if !(is32Bit(-c - d)) { 3987 break 3988 } 3989 v.reset(OpLOONG64ADDVconst) 3990 v.AuxInt = int64ToAuxInt(-c - d) 3991 v.AddArg(x) 3992 return true 3993 } 3994 // match: (SUBVconst [c] (ADDVconst [d] x)) 3995 // cond: is32Bit(-c+d) 3996 // result: (ADDVconst [-c+d] x) 3997 for { 3998 c := auxIntToInt64(v.AuxInt) 3999 if v_0.Op != OpLOONG64ADDVconst { 4000 break 4001 } 4002 d := auxIntToInt64(v_0.AuxInt) 4003 x := v_0.Args[0] 4004 if !(is32Bit(-c + d)) { 4005 break 4006 } 4007 v.reset(OpLOONG64ADDVconst) 4008 v.AuxInt = int64ToAuxInt(-c + d) 4009 v.AddArg(x) 4010 return true 4011 } 4012 return false 4013 } 4014 func rewriteValueLOONG64_OpLOONG64XOR(v *Value) bool { 4015 v_1 := v.Args[1] 4016 v_0 := v.Args[0] 4017 // match: (XOR x (MOVVconst [c])) 4018 // cond: is32Bit(c) 4019 // result: (XORconst [c] x) 4020 for { 4021 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { 4022 x := v_0 4023 if v_1.Op != OpLOONG64MOVVconst { 4024 continue 4025 } 4026 c := auxIntToInt64(v_1.AuxInt) 4027 if !(is32Bit(c)) { 4028 continue 4029 } 4030 v.reset(OpLOONG64XORconst) 4031 v.AuxInt = int64ToAuxInt(c) 4032 v.AddArg(x) 4033 return true 4034 } 4035 break 4036 } 4037 // match: (XOR x x) 4038 // result: (MOVVconst [0]) 4039 for { 4040 x := v_0 4041 if x != v_1 { 4042 break 4043 } 4044 v.reset(OpLOONG64MOVVconst) 4045 v.AuxInt = int64ToAuxInt(0) 4046 return true 4047 } 4048 return false 4049 } 4050 func rewriteValueLOONG64_OpLOONG64XORconst(v *Value) bool { 4051 v_0 := v.Args[0] 4052 // match: (XORconst [0] x) 4053 // result: x 4054 for { 4055 if auxIntToInt64(v.AuxInt) != 0 { 4056 break 4057 } 4058 x := v_0 4059 v.copyOf(x) 4060 return true 4061 } 4062 // match: (XORconst [-1] x) 4063 // result: (NORconst [0] x) 4064 for { 4065 if auxIntToInt64(v.AuxInt) != -1 { 4066 break 4067 } 4068 x := v_0 4069 v.reset(OpLOONG64NORconst) 4070 v.AuxInt = int64ToAuxInt(0) 4071 v.AddArg(x) 4072 return true 4073 } 4074 // match: (XORconst [c] (MOVVconst [d])) 4075 // result: (MOVVconst [c^d]) 4076 for { 4077 c := auxIntToInt64(v.AuxInt) 4078 if v_0.Op != OpLOONG64MOVVconst { 4079 break 4080 } 4081 d := auxIntToInt64(v_0.AuxInt) 4082 v.reset(OpLOONG64MOVVconst) 4083 v.AuxInt = int64ToAuxInt(c ^ d) 4084 return true 4085 } 4086 // match: (XORconst [c] (XORconst [d] x)) 4087 // cond: is32Bit(c^d) 4088 // result: (XORconst [c^d] x) 4089 for { 4090 c := auxIntToInt64(v.AuxInt) 4091 if v_0.Op != OpLOONG64XORconst { 4092 break 4093 } 4094 d := auxIntToInt64(v_0.AuxInt) 4095 x := v_0.Args[0] 4096 if !(is32Bit(c ^ d)) { 4097 break 4098 } 4099 v.reset(OpLOONG64XORconst) 4100 v.AuxInt = int64ToAuxInt(c ^ d) 4101 v.AddArg(x) 4102 return true 4103 } 4104 return false 4105 } 4106 func rewriteValueLOONG64_OpLeq16(v *Value) bool { 4107 v_1 := v.Args[1] 4108 v_0 := v.Args[0] 
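// Leq16 and the other integer Leq* lowerings below compute x <= y as !(x > y):
// XOR of constant 1 with an SGT (signed) or SGTU (unsigned) comparison,
// sign- or zero-extending sub-word operands to 64 bits first.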
4109 b := v.Block 4110 typ := &b.Func.Config.Types 4111 // match: (Leq16 x y) 4112 // result: (XOR (MOVVconst [1]) (SGT (SignExt16to64 x) (SignExt16to64 y))) 4113 for { 4114 x := v_0 4115 y := v_1 4116 v.reset(OpLOONG64XOR) 4117 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4118 v0.AuxInt = int64ToAuxInt(1) 4119 v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool) 4120 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 4121 v2.AddArg(x) 4122 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 4123 v3.AddArg(y) 4124 v1.AddArg2(v2, v3) 4125 v.AddArg2(v0, v1) 4126 return true 4127 } 4128 } 4129 func rewriteValueLOONG64_OpLeq16U(v *Value) bool { 4130 v_1 := v.Args[1] 4131 v_0 := v.Args[0] 4132 b := v.Block 4133 typ := &b.Func.Config.Types 4134 // match: (Leq16U x y) 4135 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt16to64 x) (ZeroExt16to64 y))) 4136 for { 4137 x := v_0 4138 y := v_1 4139 v.reset(OpLOONG64XOR) 4140 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4141 v0.AuxInt = int64ToAuxInt(1) 4142 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4143 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4144 v2.AddArg(x) 4145 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4146 v3.AddArg(y) 4147 v1.AddArg2(v2, v3) 4148 v.AddArg2(v0, v1) 4149 return true 4150 } 4151 } 4152 func rewriteValueLOONG64_OpLeq32(v *Value) bool { 4153 v_1 := v.Args[1] 4154 v_0 := v.Args[0] 4155 b := v.Block 4156 typ := &b.Func.Config.Types 4157 // match: (Leq32 x y) 4158 // result: (XOR (MOVVconst [1]) (SGT (SignExt32to64 x) (SignExt32to64 y))) 4159 for { 4160 x := v_0 4161 y := v_1 4162 v.reset(OpLOONG64XOR) 4163 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4164 v0.AuxInt = int64ToAuxInt(1) 4165 v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool) 4166 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 4167 v2.AddArg(x) 4168 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 4169 v3.AddArg(y) 4170 v1.AddArg2(v2, v3) 4171 v.AddArg2(v0, v1) 4172 return true 4173 } 4174 } 4175 func rewriteValueLOONG64_OpLeq32F(v *Value) bool { 4176 v_1 := v.Args[1] 4177 v_0 := v.Args[0] 4178 b := v.Block 4179 // match: (Leq32F x y) 4180 // result: (FPFlagTrue (CMPGEF y x)) 4181 for { 4182 x := v_0 4183 y := v_1 4184 v.reset(OpLOONG64FPFlagTrue) 4185 v0 := b.NewValue0(v.Pos, OpLOONG64CMPGEF, types.TypeFlags) 4186 v0.AddArg2(y, x) 4187 v.AddArg(v0) 4188 return true 4189 } 4190 } 4191 func rewriteValueLOONG64_OpLeq32U(v *Value) bool { 4192 v_1 := v.Args[1] 4193 v_0 := v.Args[0] 4194 b := v.Block 4195 typ := &b.Func.Config.Types 4196 // match: (Leq32U x y) 4197 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt32to64 x) (ZeroExt32to64 y))) 4198 for { 4199 x := v_0 4200 y := v_1 4201 v.reset(OpLOONG64XOR) 4202 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4203 v0.AuxInt = int64ToAuxInt(1) 4204 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4205 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4206 v2.AddArg(x) 4207 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4208 v3.AddArg(y) 4209 v1.AddArg2(v2, v3) 4210 v.AddArg2(v0, v1) 4211 return true 4212 } 4213 } 4214 func rewriteValueLOONG64_OpLeq64(v *Value) bool { 4215 v_1 := v.Args[1] 4216 v_0 := v.Args[0] 4217 b := v.Block 4218 typ := &b.Func.Config.Types 4219 // match: (Leq64 x y) 4220 // result: (XOR (MOVVconst [1]) (SGT x y)) 4221 for { 4222 x := v_0 4223 y := v_1 4224 v.reset(OpLOONG64XOR) 4225 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4226 v0.AuxInt = int64ToAuxInt(1) 4227 v1 := b.NewValue0(v.Pos, OpLOONG64SGT, 
typ.Bool) 4228 v1.AddArg2(x, y) 4229 v.AddArg2(v0, v1) 4230 return true 4231 } 4232 } 4233 func rewriteValueLOONG64_OpLeq64F(v *Value) bool { 4234 v_1 := v.Args[1] 4235 v_0 := v.Args[0] 4236 b := v.Block 4237 // match: (Leq64F x y) 4238 // result: (FPFlagTrue (CMPGED y x)) 4239 for { 4240 x := v_0 4241 y := v_1 4242 v.reset(OpLOONG64FPFlagTrue) 4243 v0 := b.NewValue0(v.Pos, OpLOONG64CMPGED, types.TypeFlags) 4244 v0.AddArg2(y, x) 4245 v.AddArg(v0) 4246 return true 4247 } 4248 } 4249 func rewriteValueLOONG64_OpLeq64U(v *Value) bool { 4250 v_1 := v.Args[1] 4251 v_0 := v.Args[0] 4252 b := v.Block 4253 typ := &b.Func.Config.Types 4254 // match: (Leq64U x y) 4255 // result: (XOR (MOVVconst [1]) (SGTU x y)) 4256 for { 4257 x := v_0 4258 y := v_1 4259 v.reset(OpLOONG64XOR) 4260 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4261 v0.AuxInt = int64ToAuxInt(1) 4262 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4263 v1.AddArg2(x, y) 4264 v.AddArg2(v0, v1) 4265 return true 4266 } 4267 } 4268 func rewriteValueLOONG64_OpLeq8(v *Value) bool { 4269 v_1 := v.Args[1] 4270 v_0 := v.Args[0] 4271 b := v.Block 4272 typ := &b.Func.Config.Types 4273 // match: (Leq8 x y) 4274 // result: (XOR (MOVVconst [1]) (SGT (SignExt8to64 x) (SignExt8to64 y))) 4275 for { 4276 x := v_0 4277 y := v_1 4278 v.reset(OpLOONG64XOR) 4279 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4280 v0.AuxInt = int64ToAuxInt(1) 4281 v1 := b.NewValue0(v.Pos, OpLOONG64SGT, typ.Bool) 4282 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 4283 v2.AddArg(x) 4284 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 4285 v3.AddArg(y) 4286 v1.AddArg2(v2, v3) 4287 v.AddArg2(v0, v1) 4288 return true 4289 } 4290 } 4291 func rewriteValueLOONG64_OpLeq8U(v *Value) bool { 4292 v_1 := v.Args[1] 4293 v_0 := v.Args[0] 4294 b := v.Block 4295 typ := &b.Func.Config.Types 4296 // match: (Leq8U x y) 4297 // result: (XOR (MOVVconst [1]) (SGTU (ZeroExt8to64 x) (ZeroExt8to64 y))) 4298 for { 4299 x := v_0 4300 y := v_1 4301 v.reset(OpLOONG64XOR) 4302 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4303 v0.AuxInt = int64ToAuxInt(1) 4304 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4305 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4306 v2.AddArg(x) 4307 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4308 v3.AddArg(y) 4309 v1.AddArg2(v2, v3) 4310 v.AddArg2(v0, v1) 4311 return true 4312 } 4313 } 4314 func rewriteValueLOONG64_OpLess16(v *Value) bool { 4315 v_1 := v.Args[1] 4316 v_0 := v.Args[0] 4317 b := v.Block 4318 typ := &b.Func.Config.Types 4319 // match: (Less16 x y) 4320 // result: (SGT (SignExt16to64 y) (SignExt16to64 x)) 4321 for { 4322 x := v_0 4323 y := v_1 4324 v.reset(OpLOONG64SGT) 4325 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 4326 v0.AddArg(y) 4327 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 4328 v1.AddArg(x) 4329 v.AddArg2(v0, v1) 4330 return true 4331 } 4332 } 4333 func rewriteValueLOONG64_OpLess16U(v *Value) bool { 4334 v_1 := v.Args[1] 4335 v_0 := v.Args[0] 4336 b := v.Block 4337 typ := &b.Func.Config.Types 4338 // match: (Less16U x y) 4339 // result: (SGTU (ZeroExt16to64 y) (ZeroExt16to64 x)) 4340 for { 4341 x := v_0 4342 y := v_1 4343 v.reset(OpLOONG64SGTU) 4344 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4345 v0.AddArg(y) 4346 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4347 v1.AddArg(x) 4348 v.AddArg2(v0, v1) 4349 return true 4350 } 4351 } 4352 func rewriteValueLOONG64_OpLess32(v *Value) bool { 4353 v_1 := v.Args[1] 4354 v_0 := v.Args[0] 4355 b := v.Block 4356 typ 
:= &b.Func.Config.Types 4357 // match: (Less32 x y) 4358 // result: (SGT (SignExt32to64 y) (SignExt32to64 x)) 4359 for { 4360 x := v_0 4361 y := v_1 4362 v.reset(OpLOONG64SGT) 4363 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 4364 v0.AddArg(y) 4365 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 4366 v1.AddArg(x) 4367 v.AddArg2(v0, v1) 4368 return true 4369 } 4370 } 4371 func rewriteValueLOONG64_OpLess32F(v *Value) bool { 4372 v_1 := v.Args[1] 4373 v_0 := v.Args[0] 4374 b := v.Block 4375 // match: (Less32F x y) 4376 // result: (FPFlagTrue (CMPGTF y x)) 4377 for { 4378 x := v_0 4379 y := v_1 4380 v.reset(OpLOONG64FPFlagTrue) 4381 v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTF, types.TypeFlags) 4382 v0.AddArg2(y, x) 4383 v.AddArg(v0) 4384 return true 4385 } 4386 } 4387 func rewriteValueLOONG64_OpLess32U(v *Value) bool { 4388 v_1 := v.Args[1] 4389 v_0 := v.Args[0] 4390 b := v.Block 4391 typ := &b.Func.Config.Types 4392 // match: (Less32U x y) 4393 // result: (SGTU (ZeroExt32to64 y) (ZeroExt32to64 x)) 4394 for { 4395 x := v_0 4396 y := v_1 4397 v.reset(OpLOONG64SGTU) 4398 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4399 v0.AddArg(y) 4400 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4401 v1.AddArg(x) 4402 v.AddArg2(v0, v1) 4403 return true 4404 } 4405 } 4406 func rewriteValueLOONG64_OpLess64(v *Value) bool { 4407 v_1 := v.Args[1] 4408 v_0 := v.Args[0] 4409 // match: (Less64 x y) 4410 // result: (SGT y x) 4411 for { 4412 x := v_0 4413 y := v_1 4414 v.reset(OpLOONG64SGT) 4415 v.AddArg2(y, x) 4416 return true 4417 } 4418 } 4419 func rewriteValueLOONG64_OpLess64F(v *Value) bool { 4420 v_1 := v.Args[1] 4421 v_0 := v.Args[0] 4422 b := v.Block 4423 // match: (Less64F x y) 4424 // result: (FPFlagTrue (CMPGTD y x)) 4425 for { 4426 x := v_0 4427 y := v_1 4428 v.reset(OpLOONG64FPFlagTrue) 4429 v0 := b.NewValue0(v.Pos, OpLOONG64CMPGTD, types.TypeFlags) 4430 v0.AddArg2(y, x) 4431 v.AddArg(v0) 4432 return true 4433 } 4434 } 4435 func rewriteValueLOONG64_OpLess64U(v *Value) bool { 4436 v_1 := v.Args[1] 4437 v_0 := v.Args[0] 4438 // match: (Less64U x y) 4439 // result: (SGTU y x) 4440 for { 4441 x := v_0 4442 y := v_1 4443 v.reset(OpLOONG64SGTU) 4444 v.AddArg2(y, x) 4445 return true 4446 } 4447 } 4448 func rewriteValueLOONG64_OpLess8(v *Value) bool { 4449 v_1 := v.Args[1] 4450 v_0 := v.Args[0] 4451 b := v.Block 4452 typ := &b.Func.Config.Types 4453 // match: (Less8 x y) 4454 // result: (SGT (SignExt8to64 y) (SignExt8to64 x)) 4455 for { 4456 x := v_0 4457 y := v_1 4458 v.reset(OpLOONG64SGT) 4459 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 4460 v0.AddArg(y) 4461 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 4462 v1.AddArg(x) 4463 v.AddArg2(v0, v1) 4464 return true 4465 } 4466 } 4467 func rewriteValueLOONG64_OpLess8U(v *Value) bool { 4468 v_1 := v.Args[1] 4469 v_0 := v.Args[0] 4470 b := v.Block 4471 typ := &b.Func.Config.Types 4472 // match: (Less8U x y) 4473 // result: (SGTU (ZeroExt8to64 y) (ZeroExt8to64 x)) 4474 for { 4475 x := v_0 4476 y := v_1 4477 v.reset(OpLOONG64SGTU) 4478 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4479 v0.AddArg(y) 4480 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4481 v1.AddArg(x) 4482 v.AddArg2(v0, v1) 4483 return true 4484 } 4485 } 4486 func rewriteValueLOONG64_OpLoad(v *Value) bool { 4487 v_1 := v.Args[1] 4488 v_0 := v.Args[0] 4489 // match: (Load <t> ptr mem) 4490 // cond: t.IsBoolean() 4491 // result: (MOVBUload ptr mem) 4492 for { 4493 t := v.Type 4494 ptr := v_0 4495 mem := v_1 4496 if !(t.IsBoolean()) { 4497 break 4498 } 
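// Boolean loads are lowered to unsigned byte loads (MOVBUload).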
4499 v.reset(OpLOONG64MOVBUload) 4500 v.AddArg2(ptr, mem) 4501 return true 4502 } 4503 // match: (Load <t> ptr mem) 4504 // cond: (is8BitInt(t) && isSigned(t)) 4505 // result: (MOVBload ptr mem) 4506 for { 4507 t := v.Type 4508 ptr := v_0 4509 mem := v_1 4510 if !(is8BitInt(t) && isSigned(t)) { 4511 break 4512 } 4513 v.reset(OpLOONG64MOVBload) 4514 v.AddArg2(ptr, mem) 4515 return true 4516 } 4517 // match: (Load <t> ptr mem) 4518 // cond: (is8BitInt(t) && !isSigned(t)) 4519 // result: (MOVBUload ptr mem) 4520 for { 4521 t := v.Type 4522 ptr := v_0 4523 mem := v_1 4524 if !(is8BitInt(t) && !isSigned(t)) { 4525 break 4526 } 4527 v.reset(OpLOONG64MOVBUload) 4528 v.AddArg2(ptr, mem) 4529 return true 4530 } 4531 // match: (Load <t> ptr mem) 4532 // cond: (is16BitInt(t) && isSigned(t)) 4533 // result: (MOVHload ptr mem) 4534 for { 4535 t := v.Type 4536 ptr := v_0 4537 mem := v_1 4538 if !(is16BitInt(t) && isSigned(t)) { 4539 break 4540 } 4541 v.reset(OpLOONG64MOVHload) 4542 v.AddArg2(ptr, mem) 4543 return true 4544 } 4545 // match: (Load <t> ptr mem) 4546 // cond: (is16BitInt(t) && !isSigned(t)) 4547 // result: (MOVHUload ptr mem) 4548 for { 4549 t := v.Type 4550 ptr := v_0 4551 mem := v_1 4552 if !(is16BitInt(t) && !isSigned(t)) { 4553 break 4554 } 4555 v.reset(OpLOONG64MOVHUload) 4556 v.AddArg2(ptr, mem) 4557 return true 4558 } 4559 // match: (Load <t> ptr mem) 4560 // cond: (is32BitInt(t) && isSigned(t)) 4561 // result: (MOVWload ptr mem) 4562 for { 4563 t := v.Type 4564 ptr := v_0 4565 mem := v_1 4566 if !(is32BitInt(t) && isSigned(t)) { 4567 break 4568 } 4569 v.reset(OpLOONG64MOVWload) 4570 v.AddArg2(ptr, mem) 4571 return true 4572 } 4573 // match: (Load <t> ptr mem) 4574 // cond: (is32BitInt(t) && !isSigned(t)) 4575 // result: (MOVWUload ptr mem) 4576 for { 4577 t := v.Type 4578 ptr := v_0 4579 mem := v_1 4580 if !(is32BitInt(t) && !isSigned(t)) { 4581 break 4582 } 4583 v.reset(OpLOONG64MOVWUload) 4584 v.AddArg2(ptr, mem) 4585 return true 4586 } 4587 // match: (Load <t> ptr mem) 4588 // cond: (is64BitInt(t) || isPtr(t)) 4589 // result: (MOVVload ptr mem) 4590 for { 4591 t := v.Type 4592 ptr := v_0 4593 mem := v_1 4594 if !(is64BitInt(t) || isPtr(t)) { 4595 break 4596 } 4597 v.reset(OpLOONG64MOVVload) 4598 v.AddArg2(ptr, mem) 4599 return true 4600 } 4601 // match: (Load <t> ptr mem) 4602 // cond: is32BitFloat(t) 4603 // result: (MOVFload ptr mem) 4604 for { 4605 t := v.Type 4606 ptr := v_0 4607 mem := v_1 4608 if !(is32BitFloat(t)) { 4609 break 4610 } 4611 v.reset(OpLOONG64MOVFload) 4612 v.AddArg2(ptr, mem) 4613 return true 4614 } 4615 // match: (Load <t> ptr mem) 4616 // cond: is64BitFloat(t) 4617 // result: (MOVDload ptr mem) 4618 for { 4619 t := v.Type 4620 ptr := v_0 4621 mem := v_1 4622 if !(is64BitFloat(t)) { 4623 break 4624 } 4625 v.reset(OpLOONG64MOVDload) 4626 v.AddArg2(ptr, mem) 4627 return true 4628 } 4629 return false 4630 } 4631 func rewriteValueLOONG64_OpLocalAddr(v *Value) bool { 4632 v_0 := v.Args[0] 4633 // match: (LocalAddr {sym} base _) 4634 // result: (MOVVaddr {sym} base) 4635 for { 4636 sym := auxToSym(v.Aux) 4637 base := v_0 4638 v.reset(OpLOONG64MOVVaddr) 4639 v.Aux = symToAux(sym) 4640 v.AddArg(base) 4641 return true 4642 } 4643 } 4644 func rewriteValueLOONG64_OpLsh16x16(v *Value) bool { 4645 v_1 := v.Args[1] 4646 v_0 := v.Args[0] 4647 b := v.Block 4648 typ := &b.Func.Config.Types 4649 // match: (Lsh16x16 <t> x y) 4650 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 4651 for { 4652 t := v.Type 4653 x 
:= v_0 4654 y := v_1 4655 v.reset(OpLOONG64AND) 4656 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4657 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4658 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4659 v2.AuxInt = int64ToAuxInt(64) 4660 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4661 v3.AddArg(y) 4662 v1.AddArg2(v2, v3) 4663 v0.AddArg(v1) 4664 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4665 v4.AddArg2(x, v3) 4666 v.AddArg2(v0, v4) 4667 return true 4668 } 4669 } 4670 func rewriteValueLOONG64_OpLsh16x32(v *Value) bool { 4671 v_1 := v.Args[1] 4672 v_0 := v.Args[0] 4673 b := v.Block 4674 typ := &b.Func.Config.Types 4675 // match: (Lsh16x32 <t> x y) 4676 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 4677 for { 4678 t := v.Type 4679 x := v_0 4680 y := v_1 4681 v.reset(OpLOONG64AND) 4682 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4683 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4684 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4685 v2.AuxInt = int64ToAuxInt(64) 4686 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4687 v3.AddArg(y) 4688 v1.AddArg2(v2, v3) 4689 v0.AddArg(v1) 4690 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4691 v4.AddArg2(x, v3) 4692 v.AddArg2(v0, v4) 4693 return true 4694 } 4695 } 4696 func rewriteValueLOONG64_OpLsh16x64(v *Value) bool { 4697 v_1 := v.Args[1] 4698 v_0 := v.Args[0] 4699 b := v.Block 4700 typ := &b.Func.Config.Types 4701 // match: (Lsh16x64 <t> x y) 4702 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 4703 for { 4704 t := v.Type 4705 x := v_0 4706 y := v_1 4707 v.reset(OpLOONG64AND) 4708 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4709 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4710 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4711 v2.AuxInt = int64ToAuxInt(64) 4712 v1.AddArg2(v2, y) 4713 v0.AddArg(v1) 4714 v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4715 v3.AddArg2(x, y) 4716 v.AddArg2(v0, v3) 4717 return true 4718 } 4719 } 4720 func rewriteValueLOONG64_OpLsh16x8(v *Value) bool { 4721 v_1 := v.Args[1] 4722 v_0 := v.Args[0] 4723 b := v.Block 4724 typ := &b.Func.Config.Types 4725 // match: (Lsh16x8 <t> x y) 4726 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 4727 for { 4728 t := v.Type 4729 x := v_0 4730 y := v_1 4731 v.reset(OpLOONG64AND) 4732 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4733 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4734 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4735 v2.AuxInt = int64ToAuxInt(64) 4736 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4737 v3.AddArg(y) 4738 v1.AddArg2(v2, v3) 4739 v0.AddArg(v1) 4740 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4741 v4.AddArg2(x, v3) 4742 v.AddArg2(v0, v4) 4743 return true 4744 } 4745 } 4746 func rewriteValueLOONG64_OpLsh32x16(v *Value) bool { 4747 v_1 := v.Args[1] 4748 v_0 := v.Args[0] 4749 b := v.Block 4750 typ := &b.Func.Config.Types 4751 // match: (Lsh32x16 <t> x y) 4752 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 4753 for { 4754 t := v.Type 4755 x := v_0 4756 y := v_1 4757 v.reset(OpLOONG64AND) 4758 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4759 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4760 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4761 v2.AuxInt = int64ToAuxInt(64) 4762 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4763 v3.AddArg(y) 4764 
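// --- Illustrative aside (not part of the generated file) ---
// The Lsh* rules above share one trick: the hardware shift looks only at the
// low bits of the count, so the SLLV result is additionally ANDed with
// NEGV(SGTU(64, y)), which is all ones when y < 64 and zero otherwise. That
// forces the Go-level result to 0 for shift counts >= 64. A hypothetical
// plain-Go model of the pattern (assuming SLLV masks the count to 6 bits):
func lshViaMask(x, y uint64) uint64 {
	sgtu := uint64(0)
	if 64 > y { // SGTU (MOVVconst [64]) y
		sgtu = 1
	}
	mask := -sgtu            // NEGV: all ones or zero
	shifted := x << (y & 63) // SLLV with a 6-bit count
	return mask & shifted    // AND
}
// --- end aside ---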
v1.AddArg2(v2, v3) 4765 v0.AddArg(v1) 4766 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4767 v4.AddArg2(x, v3) 4768 v.AddArg2(v0, v4) 4769 return true 4770 } 4771 } 4772 func rewriteValueLOONG64_OpLsh32x32(v *Value) bool { 4773 v_1 := v.Args[1] 4774 v_0 := v.Args[0] 4775 b := v.Block 4776 typ := &b.Func.Config.Types 4777 // match: (Lsh32x32 <t> x y) 4778 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 4779 for { 4780 t := v.Type 4781 x := v_0 4782 y := v_1 4783 v.reset(OpLOONG64AND) 4784 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4785 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4786 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4787 v2.AuxInt = int64ToAuxInt(64) 4788 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4789 v3.AddArg(y) 4790 v1.AddArg2(v2, v3) 4791 v0.AddArg(v1) 4792 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4793 v4.AddArg2(x, v3) 4794 v.AddArg2(v0, v4) 4795 return true 4796 } 4797 } 4798 func rewriteValueLOONG64_OpLsh32x64(v *Value) bool { 4799 v_1 := v.Args[1] 4800 v_0 := v.Args[0] 4801 b := v.Block 4802 typ := &b.Func.Config.Types 4803 // match: (Lsh32x64 <t> x y) 4804 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 4805 for { 4806 t := v.Type 4807 x := v_0 4808 y := v_1 4809 v.reset(OpLOONG64AND) 4810 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4811 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4812 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4813 v2.AuxInt = int64ToAuxInt(64) 4814 v1.AddArg2(v2, y) 4815 v0.AddArg(v1) 4816 v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4817 v3.AddArg2(x, y) 4818 v.AddArg2(v0, v3) 4819 return true 4820 } 4821 } 4822 func rewriteValueLOONG64_OpLsh32x8(v *Value) bool { 4823 v_1 := v.Args[1] 4824 v_0 := v.Args[0] 4825 b := v.Block 4826 typ := &b.Func.Config.Types 4827 // match: (Lsh32x8 <t> x y) 4828 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 4829 for { 4830 t := v.Type 4831 x := v_0 4832 y := v_1 4833 v.reset(OpLOONG64AND) 4834 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4835 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4836 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4837 v2.AuxInt = int64ToAuxInt(64) 4838 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4839 v3.AddArg(y) 4840 v1.AddArg2(v2, v3) 4841 v0.AddArg(v1) 4842 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4843 v4.AddArg2(x, v3) 4844 v.AddArg2(v0, v4) 4845 return true 4846 } 4847 } 4848 func rewriteValueLOONG64_OpLsh64x16(v *Value) bool { 4849 v_1 := v.Args[1] 4850 v_0 := v.Args[0] 4851 b := v.Block 4852 typ := &b.Func.Config.Types 4853 // match: (Lsh64x16 <t> x y) 4854 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 4855 for { 4856 t := v.Type 4857 x := v_0 4858 y := v_1 4859 v.reset(OpLOONG64AND) 4860 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4861 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4862 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4863 v2.AuxInt = int64ToAuxInt(64) 4864 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4865 v3.AddArg(y) 4866 v1.AddArg2(v2, v3) 4867 v0.AddArg(v1) 4868 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4869 v4.AddArg2(x, v3) 4870 v.AddArg2(v0, v4) 4871 return true 4872 } 4873 } 4874 func rewriteValueLOONG64_OpLsh64x32(v *Value) bool { 4875 v_1 := v.Args[1] 4876 v_0 := v.Args[0] 4877 b := v.Block 4878 typ := &b.Func.Config.Types 4879 // match: 
(Lsh64x32 <t> x y) 4880 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 4881 for { 4882 t := v.Type 4883 x := v_0 4884 y := v_1 4885 v.reset(OpLOONG64AND) 4886 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4887 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4888 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4889 v2.AuxInt = int64ToAuxInt(64) 4890 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4891 v3.AddArg(y) 4892 v1.AddArg2(v2, v3) 4893 v0.AddArg(v1) 4894 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4895 v4.AddArg2(x, v3) 4896 v.AddArg2(v0, v4) 4897 return true 4898 } 4899 } 4900 func rewriteValueLOONG64_OpLsh64x64(v *Value) bool { 4901 v_1 := v.Args[1] 4902 v_0 := v.Args[0] 4903 b := v.Block 4904 typ := &b.Func.Config.Types 4905 // match: (Lsh64x64 <t> x y) 4906 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 4907 for { 4908 t := v.Type 4909 x := v_0 4910 y := v_1 4911 v.reset(OpLOONG64AND) 4912 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4913 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4914 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4915 v2.AuxInt = int64ToAuxInt(64) 4916 v1.AddArg2(v2, y) 4917 v0.AddArg(v1) 4918 v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4919 v3.AddArg2(x, y) 4920 v.AddArg2(v0, v3) 4921 return true 4922 } 4923 } 4924 func rewriteValueLOONG64_OpLsh64x8(v *Value) bool { 4925 v_1 := v.Args[1] 4926 v_0 := v.Args[0] 4927 b := v.Block 4928 typ := &b.Func.Config.Types 4929 // match: (Lsh64x8 <t> x y) 4930 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 4931 for { 4932 t := v.Type 4933 x := v_0 4934 y := v_1 4935 v.reset(OpLOONG64AND) 4936 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4937 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4938 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4939 v2.AuxInt = int64ToAuxInt(64) 4940 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 4941 v3.AddArg(y) 4942 v1.AddArg2(v2, v3) 4943 v0.AddArg(v1) 4944 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4945 v4.AddArg2(x, v3) 4946 v.AddArg2(v0, v4) 4947 return true 4948 } 4949 } 4950 func rewriteValueLOONG64_OpLsh8x16(v *Value) bool { 4951 v_1 := v.Args[1] 4952 v_0 := v.Args[0] 4953 b := v.Block 4954 typ := &b.Func.Config.Types 4955 // match: (Lsh8x16 <t> x y) 4956 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SLLV <t> x (ZeroExt16to64 y))) 4957 for { 4958 t := v.Type 4959 x := v_0 4960 y := v_1 4961 v.reset(OpLOONG64AND) 4962 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4963 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4964 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4965 v2.AuxInt = int64ToAuxInt(64) 4966 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 4967 v3.AddArg(y) 4968 v1.AddArg2(v2, v3) 4969 v0.AddArg(v1) 4970 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4971 v4.AddArg2(x, v3) 4972 v.AddArg2(v0, v4) 4973 return true 4974 } 4975 } 4976 func rewriteValueLOONG64_OpLsh8x32(v *Value) bool { 4977 v_1 := v.Args[1] 4978 v_0 := v.Args[0] 4979 b := v.Block 4980 typ := &b.Func.Config.Types 4981 // match: (Lsh8x32 <t> x y) 4982 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SLLV <t> x (ZeroExt32to64 y))) 4983 for { 4984 t := v.Type 4985 x := v_0 4986 y := v_1 4987 v.reset(OpLOONG64AND) 4988 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 4989 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 4990 v2 := 
b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 4991 v2.AuxInt = int64ToAuxInt(64) 4992 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 4993 v3.AddArg(y) 4994 v1.AddArg2(v2, v3) 4995 v0.AddArg(v1) 4996 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 4997 v4.AddArg2(x, v3) 4998 v.AddArg2(v0, v4) 4999 return true 5000 } 5001 } 5002 func rewriteValueLOONG64_OpLsh8x64(v *Value) bool { 5003 v_1 := v.Args[1] 5004 v_0 := v.Args[0] 5005 b := v.Block 5006 typ := &b.Func.Config.Types 5007 // match: (Lsh8x64 <t> x y) 5008 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SLLV <t> x y)) 5009 for { 5010 t := v.Type 5011 x := v_0 5012 y := v_1 5013 v.reset(OpLOONG64AND) 5014 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 5015 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 5016 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5017 v2.AuxInt = int64ToAuxInt(64) 5018 v1.AddArg2(v2, y) 5019 v0.AddArg(v1) 5020 v3 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 5021 v3.AddArg2(x, y) 5022 v.AddArg2(v0, v3) 5023 return true 5024 } 5025 } 5026 func rewriteValueLOONG64_OpLsh8x8(v *Value) bool { 5027 v_1 := v.Args[1] 5028 v_0 := v.Args[0] 5029 b := v.Block 5030 typ := &b.Func.Config.Types 5031 // match: (Lsh8x8 <t> x y) 5032 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SLLV <t> x (ZeroExt8to64 y))) 5033 for { 5034 t := v.Type 5035 x := v_0 5036 y := v_1 5037 v.reset(OpLOONG64AND) 5038 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 5039 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 5040 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5041 v2.AuxInt = int64ToAuxInt(64) 5042 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 5043 v3.AddArg(y) 5044 v1.AddArg2(v2, v3) 5045 v0.AddArg(v1) 5046 v4 := b.NewValue0(v.Pos, OpLOONG64SLLV, t) 5047 v4.AddArg2(x, v3) 5048 v.AddArg2(v0, v4) 5049 return true 5050 } 5051 } 5052 func rewriteValueLOONG64_OpMod16(v *Value) bool { 5053 v_1 := v.Args[1] 5054 v_0 := v.Args[0] 5055 b := v.Block 5056 typ := &b.Func.Config.Types 5057 // match: (Mod16 x y) 5058 // result: (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))) 5059 for { 5060 x := v_0 5061 y := v_1 5062 v.reset(OpSelect0) 5063 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 5064 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 5065 v1.AddArg(x) 5066 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 5067 v2.AddArg(y) 5068 v0.AddArg2(v1, v2) 5069 v.AddArg(v0) 5070 return true 5071 } 5072 } 5073 func rewriteValueLOONG64_OpMod16u(v *Value) bool { 5074 v_1 := v.Args[1] 5075 v_0 := v.Args[0] 5076 b := v.Block 5077 typ := &b.Func.Config.Types 5078 // match: (Mod16u x y) 5079 // result: (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y))) 5080 for { 5081 x := v_0 5082 y := v_1 5083 v.reset(OpSelect0) 5084 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5085 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 5086 v1.AddArg(x) 5087 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 5088 v2.AddArg(y) 5089 v0.AddArg2(v1, v2) 5090 v.AddArg(v0) 5091 return true 5092 } 5093 } 5094 func rewriteValueLOONG64_OpMod32(v *Value) bool { 5095 v_1 := v.Args[1] 5096 v_0 := v.Args[0] 5097 b := v.Block 5098 typ := &b.Func.Config.Types 5099 // match: (Mod32 x y) 5100 // result: (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y))) 5101 for { 5102 x := v_0 5103 y := v_1 5104 v.reset(OpSelect0) 5105 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 5106 v1 := b.NewValue0(v.Pos, 
OpSignExt32to64, typ.Int64) 5107 v1.AddArg(x) 5108 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 5109 v2.AddArg(y) 5110 v0.AddArg2(v1, v2) 5111 v.AddArg(v0) 5112 return true 5113 } 5114 } 5115 func rewriteValueLOONG64_OpMod32u(v *Value) bool { 5116 v_1 := v.Args[1] 5117 v_0 := v.Args[0] 5118 b := v.Block 5119 typ := &b.Func.Config.Types 5120 // match: (Mod32u x y) 5121 // result: (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y))) 5122 for { 5123 x := v_0 5124 y := v_1 5125 v.reset(OpSelect0) 5126 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5127 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 5128 v1.AddArg(x) 5129 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 5130 v2.AddArg(y) 5131 v0.AddArg2(v1, v2) 5132 v.AddArg(v0) 5133 return true 5134 } 5135 } 5136 func rewriteValueLOONG64_OpMod64(v *Value) bool { 5137 v_1 := v.Args[1] 5138 v_0 := v.Args[0] 5139 b := v.Block 5140 typ := &b.Func.Config.Types 5141 // match: (Mod64 x y) 5142 // result: (Select0 (DIVV x y)) 5143 for { 5144 x := v_0 5145 y := v_1 5146 v.reset(OpSelect0) 5147 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 5148 v0.AddArg2(x, y) 5149 v.AddArg(v0) 5150 return true 5151 } 5152 } 5153 func rewriteValueLOONG64_OpMod64u(v *Value) bool { 5154 v_1 := v.Args[1] 5155 v_0 := v.Args[0] 5156 b := v.Block 5157 typ := &b.Func.Config.Types 5158 // match: (Mod64u x y) 5159 // result: (Select0 (DIVVU x y)) 5160 for { 5161 x := v_0 5162 y := v_1 5163 v.reset(OpSelect0) 5164 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5165 v0.AddArg2(x, y) 5166 v.AddArg(v0) 5167 return true 5168 } 5169 } 5170 func rewriteValueLOONG64_OpMod8(v *Value) bool { 5171 v_1 := v.Args[1] 5172 v_0 := v.Args[0] 5173 b := v.Block 5174 typ := &b.Func.Config.Types 5175 // match: (Mod8 x y) 5176 // result: (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y))) 5177 for { 5178 x := v_0 5179 y := v_1 5180 v.reset(OpSelect0) 5181 v0 := b.NewValue0(v.Pos, OpLOONG64DIVV, types.NewTuple(typ.Int64, typ.Int64)) 5182 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 5183 v1.AddArg(x) 5184 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 5185 v2.AddArg(y) 5186 v0.AddArg2(v1, v2) 5187 v.AddArg(v0) 5188 return true 5189 } 5190 } 5191 func rewriteValueLOONG64_OpMod8u(v *Value) bool { 5192 v_1 := v.Args[1] 5193 v_0 := v.Args[0] 5194 b := v.Block 5195 typ := &b.Func.Config.Types 5196 // match: (Mod8u x y) 5197 // result: (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y))) 5198 for { 5199 x := v_0 5200 y := v_1 5201 v.reset(OpSelect0) 5202 v0 := b.NewValue0(v.Pos, OpLOONG64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5203 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 5204 v1.AddArg(x) 5205 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 5206 v2.AddArg(y) 5207 v0.AddArg2(v1, v2) 5208 v.AddArg(v0) 5209 return true 5210 } 5211 } 5212 func rewriteValueLOONG64_OpMove(v *Value) bool { 5213 v_2 := v.Args[2] 5214 v_1 := v.Args[1] 5215 v_0 := v.Args[0] 5216 b := v.Block 5217 config := b.Func.Config 5218 typ := &b.Func.Config.Types 5219 // match: (Move [0] _ _ mem) 5220 // result: mem 5221 for { 5222 if auxIntToInt64(v.AuxInt) != 0 { 5223 break 5224 } 5225 mem := v_2 5226 v.copyOf(mem) 5227 return true 5228 } 5229 // match: (Move [1] dst src mem) 5230 // result: (MOVBstore dst (MOVBload src mem) mem) 5231 for { 5232 if auxIntToInt64(v.AuxInt) != 1 { 5233 break 5234 } 5235 dst := v_0 5236 src := v_1 5237 mem := v_2 5238 v.reset(OpLOONG64MOVBstore) 5239 v0 := 
b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5240 v0.AddArg2(src, mem) 5241 v.AddArg3(dst, v0, mem) 5242 return true 5243 } 5244 // match: (Move [2] {t} dst src mem) 5245 // cond: t.Alignment()%2 == 0 5246 // result: (MOVHstore dst (MOVHload src mem) mem) 5247 for { 5248 if auxIntToInt64(v.AuxInt) != 2 { 5249 break 5250 } 5251 t := auxToType(v.Aux) 5252 dst := v_0 5253 src := v_1 5254 mem := v_2 5255 if !(t.Alignment()%2 == 0) { 5256 break 5257 } 5258 v.reset(OpLOONG64MOVHstore) 5259 v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5260 v0.AddArg2(src, mem) 5261 v.AddArg3(dst, v0, mem) 5262 return true 5263 } 5264 // match: (Move [2] dst src mem) 5265 // result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)) 5266 for { 5267 if auxIntToInt64(v.AuxInt) != 2 { 5268 break 5269 } 5270 dst := v_0 5271 src := v_1 5272 mem := v_2 5273 v.reset(OpLOONG64MOVBstore) 5274 v.AuxInt = int32ToAuxInt(1) 5275 v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5276 v0.AuxInt = int32ToAuxInt(1) 5277 v0.AddArg2(src, mem) 5278 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5279 v2 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5280 v2.AddArg2(src, mem) 5281 v1.AddArg3(dst, v2, mem) 5282 v.AddArg3(dst, v0, v1) 5283 return true 5284 } 5285 // match: (Move [4] {t} dst src mem) 5286 // cond: t.Alignment()%4 == 0 5287 // result: (MOVWstore dst (MOVWload src mem) mem) 5288 for { 5289 if auxIntToInt64(v.AuxInt) != 4 { 5290 break 5291 } 5292 t := auxToType(v.Aux) 5293 dst := v_0 5294 src := v_1 5295 mem := v_2 5296 if !(t.Alignment()%4 == 0) { 5297 break 5298 } 5299 v.reset(OpLOONG64MOVWstore) 5300 v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5301 v0.AddArg2(src, mem) 5302 v.AddArg3(dst, v0, mem) 5303 return true 5304 } 5305 // match: (Move [4] {t} dst src mem) 5306 // cond: t.Alignment()%2 == 0 5307 // result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)) 5308 for { 5309 if auxIntToInt64(v.AuxInt) != 4 { 5310 break 5311 } 5312 t := auxToType(v.Aux) 5313 dst := v_0 5314 src := v_1 5315 mem := v_2 5316 if !(t.Alignment()%2 == 0) { 5317 break 5318 } 5319 v.reset(OpLOONG64MOVHstore) 5320 v.AuxInt = int32ToAuxInt(2) 5321 v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5322 v0.AuxInt = int32ToAuxInt(2) 5323 v0.AddArg2(src, mem) 5324 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5325 v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5326 v2.AddArg2(src, mem) 5327 v1.AddArg3(dst, v2, mem) 5328 v.AddArg3(dst, v0, v1) 5329 return true 5330 } 5331 // match: (Move [4] dst src mem) 5332 // result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))) 5333 for { 5334 if auxIntToInt64(v.AuxInt) != 4 { 5335 break 5336 } 5337 dst := v_0 5338 src := v_1 5339 mem := v_2 5340 v.reset(OpLOONG64MOVBstore) 5341 v.AuxInt = int32ToAuxInt(3) 5342 v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5343 v0.AuxInt = int32ToAuxInt(3) 5344 v0.AddArg2(src, mem) 5345 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5346 v1.AuxInt = int32ToAuxInt(2) 5347 v2 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5348 v2.AuxInt = int32ToAuxInt(2) 5349 v2.AddArg2(src, mem) 5350 v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5351 v3.AuxInt = int32ToAuxInt(1) 5352 v4 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5353 v4.AuxInt = int32ToAuxInt(1) 5354 v4.AddArg2(src, mem) 5355 v5 := 
b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5356 v6 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5357 v6.AddArg2(src, mem) 5358 v5.AddArg3(dst, v6, mem) 5359 v3.AddArg3(dst, v4, v5) 5360 v1.AddArg3(dst, v2, v3) 5361 v.AddArg3(dst, v0, v1) 5362 return true 5363 } 5364 // match: (Move [8] {t} dst src mem) 5365 // cond: t.Alignment()%8 == 0 5366 // result: (MOVVstore dst (MOVVload src mem) mem) 5367 for { 5368 if auxIntToInt64(v.AuxInt) != 8 { 5369 break 5370 } 5371 t := auxToType(v.Aux) 5372 dst := v_0 5373 src := v_1 5374 mem := v_2 5375 if !(t.Alignment()%8 == 0) { 5376 break 5377 } 5378 v.reset(OpLOONG64MOVVstore) 5379 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5380 v0.AddArg2(src, mem) 5381 v.AddArg3(dst, v0, mem) 5382 return true 5383 } 5384 // match: (Move [8] {t} dst src mem) 5385 // cond: t.Alignment()%4 == 0 5386 // result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)) 5387 for { 5388 if auxIntToInt64(v.AuxInt) != 8 { 5389 break 5390 } 5391 t := auxToType(v.Aux) 5392 dst := v_0 5393 src := v_1 5394 mem := v_2 5395 if !(t.Alignment()%4 == 0) { 5396 break 5397 } 5398 v.reset(OpLOONG64MOVWstore) 5399 v.AuxInt = int32ToAuxInt(4) 5400 v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5401 v0.AuxInt = int32ToAuxInt(4) 5402 v0.AddArg2(src, mem) 5403 v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 5404 v2 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5405 v2.AddArg2(src, mem) 5406 v1.AddArg3(dst, v2, mem) 5407 v.AddArg3(dst, v0, v1) 5408 return true 5409 } 5410 // match: (Move [8] {t} dst src mem) 5411 // cond: t.Alignment()%2 == 0 5412 // result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))) 5413 for { 5414 if auxIntToInt64(v.AuxInt) != 8 { 5415 break 5416 } 5417 t := auxToType(v.Aux) 5418 dst := v_0 5419 src := v_1 5420 mem := v_2 5421 if !(t.Alignment()%2 == 0) { 5422 break 5423 } 5424 v.reset(OpLOONG64MOVHstore) 5425 v.AuxInt = int32ToAuxInt(6) 5426 v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5427 v0.AuxInt = int32ToAuxInt(6) 5428 v0.AddArg2(src, mem) 5429 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5430 v1.AuxInt = int32ToAuxInt(4) 5431 v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5432 v2.AuxInt = int32ToAuxInt(4) 5433 v2.AddArg2(src, mem) 5434 v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5435 v3.AuxInt = int32ToAuxInt(2) 5436 v4 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5437 v4.AuxInt = int32ToAuxInt(2) 5438 v4.AddArg2(src, mem) 5439 v5 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5440 v6 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5441 v6.AddArg2(src, mem) 5442 v5.AddArg3(dst, v6, mem) 5443 v3.AddArg3(dst, v4, v5) 5444 v1.AddArg3(dst, v2, v3) 5445 v.AddArg3(dst, v0, v1) 5446 return true 5447 } 5448 // match: (Move [3] dst src mem) 5449 // result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))) 5450 for { 5451 if auxIntToInt64(v.AuxInt) != 3 { 5452 break 5453 } 5454 dst := v_0 5455 src := v_1 5456 mem := v_2 5457 v.reset(OpLOONG64MOVBstore) 5458 v.AuxInt = int32ToAuxInt(2) 5459 v0 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5460 v0.AuxInt = int32ToAuxInt(2) 5461 v0.AddArg2(src, mem) 5462 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5463 v1.AuxInt = int32ToAuxInt(1) 5464 v2 := b.NewValue0(v.Pos, 
OpLOONG64MOVBload, typ.Int8) 5465 v2.AuxInt = int32ToAuxInt(1) 5466 v2.AddArg2(src, mem) 5467 v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 5468 v4 := b.NewValue0(v.Pos, OpLOONG64MOVBload, typ.Int8) 5469 v4.AddArg2(src, mem) 5470 v3.AddArg3(dst, v4, mem) 5471 v1.AddArg3(dst, v2, v3) 5472 v.AddArg3(dst, v0, v1) 5473 return true 5474 } 5475 // match: (Move [6] {t} dst src mem) 5476 // cond: t.Alignment()%2 == 0 5477 // result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))) 5478 for { 5479 if auxIntToInt64(v.AuxInt) != 6 { 5480 break 5481 } 5482 t := auxToType(v.Aux) 5483 dst := v_0 5484 src := v_1 5485 mem := v_2 5486 if !(t.Alignment()%2 == 0) { 5487 break 5488 } 5489 v.reset(OpLOONG64MOVHstore) 5490 v.AuxInt = int32ToAuxInt(4) 5491 v0 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5492 v0.AuxInt = int32ToAuxInt(4) 5493 v0.AddArg2(src, mem) 5494 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5495 v1.AuxInt = int32ToAuxInt(2) 5496 v2 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5497 v2.AuxInt = int32ToAuxInt(2) 5498 v2.AddArg2(src, mem) 5499 v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 5500 v4 := b.NewValue0(v.Pos, OpLOONG64MOVHload, typ.Int16) 5501 v4.AddArg2(src, mem) 5502 v3.AddArg3(dst, v4, mem) 5503 v1.AddArg3(dst, v2, v3) 5504 v.AddArg3(dst, v0, v1) 5505 return true 5506 } 5507 // match: (Move [12] {t} dst src mem) 5508 // cond: t.Alignment()%4 == 0 5509 // result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))) 5510 for { 5511 if auxIntToInt64(v.AuxInt) != 12 { 5512 break 5513 } 5514 t := auxToType(v.Aux) 5515 dst := v_0 5516 src := v_1 5517 mem := v_2 5518 if !(t.Alignment()%4 == 0) { 5519 break 5520 } 5521 v.reset(OpLOONG64MOVWstore) 5522 v.AuxInt = int32ToAuxInt(8) 5523 v0 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5524 v0.AuxInt = int32ToAuxInt(8) 5525 v0.AddArg2(src, mem) 5526 v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 5527 v1.AuxInt = int32ToAuxInt(4) 5528 v2 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5529 v2.AuxInt = int32ToAuxInt(4) 5530 v2.AddArg2(src, mem) 5531 v3 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 5532 v4 := b.NewValue0(v.Pos, OpLOONG64MOVWload, typ.Int32) 5533 v4.AddArg2(src, mem) 5534 v3.AddArg3(dst, v4, mem) 5535 v1.AddArg3(dst, v2, v3) 5536 v.AddArg3(dst, v0, v1) 5537 return true 5538 } 5539 // match: (Move [16] {t} dst src mem) 5540 // cond: t.Alignment()%8 == 0 5541 // result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)) 5542 for { 5543 if auxIntToInt64(v.AuxInt) != 16 { 5544 break 5545 } 5546 t := auxToType(v.Aux) 5547 dst := v_0 5548 src := v_1 5549 mem := v_2 5550 if !(t.Alignment()%8 == 0) { 5551 break 5552 } 5553 v.reset(OpLOONG64MOVVstore) 5554 v.AuxInt = int32ToAuxInt(8) 5555 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5556 v0.AuxInt = int32ToAuxInt(8) 5557 v0.AddArg2(src, mem) 5558 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 5559 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5560 v2.AddArg2(src, mem) 5561 v1.AddArg3(dst, v2, mem) 5562 v.AddArg3(dst, v0, v1) 5563 return true 5564 } 5565 // match: (Move [24] {t} dst src mem) 5566 // cond: t.Alignment()%8 == 0 5567 // result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))) 5568 for { 5569 if 
auxIntToInt64(v.AuxInt) != 24 { 5570 break 5571 } 5572 t := auxToType(v.Aux) 5573 dst := v_0 5574 src := v_1 5575 mem := v_2 5576 if !(t.Alignment()%8 == 0) { 5577 break 5578 } 5579 v.reset(OpLOONG64MOVVstore) 5580 v.AuxInt = int32ToAuxInt(16) 5581 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5582 v0.AuxInt = int32ToAuxInt(16) 5583 v0.AddArg2(src, mem) 5584 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 5585 v1.AuxInt = int32ToAuxInt(8) 5586 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5587 v2.AuxInt = int32ToAuxInt(8) 5588 v2.AddArg2(src, mem) 5589 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 5590 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVload, typ.UInt64) 5591 v4.AddArg2(src, mem) 5592 v3.AddArg3(dst, v4, mem) 5593 v1.AddArg3(dst, v2, v3) 5594 v.AddArg3(dst, v0, v1) 5595 return true 5596 } 5597 // match: (Move [s] {t} dst src mem) 5598 // cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s) 5599 // result: (DUFFCOPY [16 * (128 - s/8)] dst src mem) 5600 for { 5601 s := auxIntToInt64(v.AuxInt) 5602 t := auxToType(v.Aux) 5603 dst := v_0 5604 src := v_1 5605 mem := v_2 5606 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) { 5607 break 5608 } 5609 v.reset(OpLOONG64DUFFCOPY) 5610 v.AuxInt = int64ToAuxInt(16 * (128 - s/8)) 5611 v.AddArg3(dst, src, mem) 5612 return true 5613 } 5614 // match: (Move [s] {t} dst src mem) 5615 // cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0 5616 // result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem) 5617 for { 5618 s := auxIntToInt64(v.AuxInt) 5619 t := auxToType(v.Aux) 5620 dst := v_0 5621 src := v_1 5622 mem := v_2 5623 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) { 5624 break 5625 } 5626 v.reset(OpLOONG64LoweredMove) 5627 v.AuxInt = int64ToAuxInt(t.Alignment()) 5628 v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, src.Type) 5629 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config)) 5630 v0.AddArg(src) 5631 v.AddArg4(dst, src, v0, mem) 5632 return true 5633 } 5634 return false 5635 } 5636 func rewriteValueLOONG64_OpMul16(v *Value) bool { 5637 v_1 := v.Args[1] 5638 v_0 := v.Args[0] 5639 b := v.Block 5640 typ := &b.Func.Config.Types 5641 // match: (Mul16 x y) 5642 // result: (Select1 (MULVU x y)) 5643 for { 5644 x := v_0 5645 y := v_1 5646 v.reset(OpSelect1) 5647 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5648 v0.AddArg2(x, y) 5649 v.AddArg(v0) 5650 return true 5651 } 5652 } 5653 func rewriteValueLOONG64_OpMul32(v *Value) bool { 5654 v_1 := v.Args[1] 5655 v_0 := v.Args[0] 5656 b := v.Block 5657 typ := &b.Func.Config.Types 5658 // match: (Mul32 x y) 5659 // result: (Select1 (MULVU x y)) 5660 for { 5661 x := v_0 5662 y := v_1 5663 v.reset(OpSelect1) 5664 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5665 v0.AddArg2(x, y) 5666 v.AddArg(v0) 5667 return true 5668 } 5669 } 5670 func rewriteValueLOONG64_OpMul64(v *Value) bool { 5671 v_1 := v.Args[1] 5672 v_0 := v.Args[0] 5673 b := v.Block 5674 typ := &b.Func.Config.Types 5675 // match: (Mul64 x y) 5676 // result: (Select1 (MULVU x y)) 5677 for { 5678 x := v_0 5679 y := v_1 5680 v.reset(OpSelect1) 5681 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5682 v0.AddArg2(x, y) 5683 v.AddArg(v0) 5684 return true 5685 } 5686 } 5687 func rewriteValueLOONG64_OpMul8(v *Value) 
bool { 5688 v_1 := v.Args[1] 5689 v_0 := v.Args[0] 5690 b := v.Block 5691 typ := &b.Func.Config.Types 5692 // match: (Mul8 x y) 5693 // result: (Select1 (MULVU x y)) 5694 for { 5695 x := v_0 5696 y := v_1 5697 v.reset(OpSelect1) 5698 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 5699 v0.AddArg2(x, y) 5700 v.AddArg(v0) 5701 return true 5702 } 5703 } 5704 func rewriteValueLOONG64_OpNeq16(v *Value) bool { 5705 v_1 := v.Args[1] 5706 v_0 := v.Args[0] 5707 b := v.Block 5708 typ := &b.Func.Config.Types 5709 // match: (Neq16 x y) 5710 // result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0])) 5711 for { 5712 x := v_0 5713 y := v_1 5714 v.reset(OpLOONG64SGTU) 5715 v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 5716 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32) 5717 v1.AddArg(x) 5718 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 5719 v2.AddArg(y) 5720 v0.AddArg2(v1, v2) 5721 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5722 v3.AuxInt = int64ToAuxInt(0) 5723 v.AddArg2(v0, v3) 5724 return true 5725 } 5726 } 5727 func rewriteValueLOONG64_OpNeq32(v *Value) bool { 5728 v_1 := v.Args[1] 5729 v_0 := v.Args[0] 5730 b := v.Block 5731 typ := &b.Func.Config.Types 5732 // match: (Neq32 x y) 5733 // result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0])) 5734 for { 5735 x := v_0 5736 y := v_1 5737 v.reset(OpLOONG64SGTU) 5738 v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 5739 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 5740 v1.AddArg(x) 5741 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 5742 v2.AddArg(y) 5743 v0.AddArg2(v1, v2) 5744 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5745 v3.AuxInt = int64ToAuxInt(0) 5746 v.AddArg2(v0, v3) 5747 return true 5748 } 5749 } 5750 func rewriteValueLOONG64_OpNeq32F(v *Value) bool { 5751 v_1 := v.Args[1] 5752 v_0 := v.Args[0] 5753 b := v.Block 5754 // match: (Neq32F x y) 5755 // result: (FPFlagFalse (CMPEQF x y)) 5756 for { 5757 x := v_0 5758 y := v_1 5759 v.reset(OpLOONG64FPFlagFalse) 5760 v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQF, types.TypeFlags) 5761 v0.AddArg2(x, y) 5762 v.AddArg(v0) 5763 return true 5764 } 5765 } 5766 func rewriteValueLOONG64_OpNeq64(v *Value) bool { 5767 v_1 := v.Args[1] 5768 v_0 := v.Args[0] 5769 b := v.Block 5770 typ := &b.Func.Config.Types 5771 // match: (Neq64 x y) 5772 // result: (SGTU (XOR x y) (MOVVconst [0])) 5773 for { 5774 x := v_0 5775 y := v_1 5776 v.reset(OpLOONG64SGTU) 5777 v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 5778 v0.AddArg2(x, y) 5779 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5780 v1.AuxInt = int64ToAuxInt(0) 5781 v.AddArg2(v0, v1) 5782 return true 5783 } 5784 } 5785 func rewriteValueLOONG64_OpNeq64F(v *Value) bool { 5786 v_1 := v.Args[1] 5787 v_0 := v.Args[0] 5788 b := v.Block 5789 // match: (Neq64F x y) 5790 // result: (FPFlagFalse (CMPEQD x y)) 5791 for { 5792 x := v_0 5793 y := v_1 5794 v.reset(OpLOONG64FPFlagFalse) 5795 v0 := b.NewValue0(v.Pos, OpLOONG64CMPEQD, types.TypeFlags) 5796 v0.AddArg2(x, y) 5797 v.AddArg(v0) 5798 return true 5799 } 5800 } 5801 func rewriteValueLOONG64_OpNeq8(v *Value) bool { 5802 v_1 := v.Args[1] 5803 v_0 := v.Args[0] 5804 b := v.Block 5805 typ := &b.Func.Config.Types 5806 // match: (Neq8 x y) 5807 // result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0])) 5808 for { 5809 x := v_0 5810 y := v_1 5811 v.reset(OpLOONG64SGTU) 5812 v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 5813 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, 
typ.UInt64) 5814 v1.AddArg(x) 5815 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 5816 v2.AddArg(y) 5817 v0.AddArg2(v1, v2) 5818 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5819 v3.AuxInt = int64ToAuxInt(0) 5820 v.AddArg2(v0, v3) 5821 return true 5822 } 5823 } 5824 func rewriteValueLOONG64_OpNeqPtr(v *Value) bool { 5825 v_1 := v.Args[1] 5826 v_0 := v.Args[0] 5827 b := v.Block 5828 typ := &b.Func.Config.Types 5829 // match: (NeqPtr x y) 5830 // result: (SGTU (XOR x y) (MOVVconst [0])) 5831 for { 5832 x := v_0 5833 y := v_1 5834 v.reset(OpLOONG64SGTU) 5835 v0 := b.NewValue0(v.Pos, OpLOONG64XOR, typ.UInt64) 5836 v0.AddArg2(x, y) 5837 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5838 v1.AuxInt = int64ToAuxInt(0) 5839 v.AddArg2(v0, v1) 5840 return true 5841 } 5842 } 5843 func rewriteValueLOONG64_OpNot(v *Value) bool { 5844 v_0 := v.Args[0] 5845 // match: (Not x) 5846 // result: (XORconst [1] x) 5847 for { 5848 x := v_0 5849 v.reset(OpLOONG64XORconst) 5850 v.AuxInt = int64ToAuxInt(1) 5851 v.AddArg(x) 5852 return true 5853 } 5854 } 5855 func rewriteValueLOONG64_OpOffPtr(v *Value) bool { 5856 v_0 := v.Args[0] 5857 // match: (OffPtr [off] ptr:(SP)) 5858 // result: (MOVVaddr [int32(off)] ptr) 5859 for { 5860 off := auxIntToInt64(v.AuxInt) 5861 ptr := v_0 5862 if ptr.Op != OpSP { 5863 break 5864 } 5865 v.reset(OpLOONG64MOVVaddr) 5866 v.AuxInt = int32ToAuxInt(int32(off)) 5867 v.AddArg(ptr) 5868 return true 5869 } 5870 // match: (OffPtr [off] ptr) 5871 // result: (ADDVconst [off] ptr) 5872 for { 5873 off := auxIntToInt64(v.AuxInt) 5874 ptr := v_0 5875 v.reset(OpLOONG64ADDVconst) 5876 v.AuxInt = int64ToAuxInt(off) 5877 v.AddArg(ptr) 5878 return true 5879 } 5880 } 5881 func rewriteValueLOONG64_OpPanicBounds(v *Value) bool { 5882 v_2 := v.Args[2] 5883 v_1 := v.Args[1] 5884 v_0 := v.Args[0] 5885 // match: (PanicBounds [kind] x y mem) 5886 // cond: boundsABI(kind) == 0 5887 // result: (LoweredPanicBoundsA [kind] x y mem) 5888 for { 5889 kind := auxIntToInt64(v.AuxInt) 5890 x := v_0 5891 y := v_1 5892 mem := v_2 5893 if !(boundsABI(kind) == 0) { 5894 break 5895 } 5896 v.reset(OpLOONG64LoweredPanicBoundsA) 5897 v.AuxInt = int64ToAuxInt(kind) 5898 v.AddArg3(x, y, mem) 5899 return true 5900 } 5901 // match: (PanicBounds [kind] x y mem) 5902 // cond: boundsABI(kind) == 1 5903 // result: (LoweredPanicBoundsB [kind] x y mem) 5904 for { 5905 kind := auxIntToInt64(v.AuxInt) 5906 x := v_0 5907 y := v_1 5908 mem := v_2 5909 if !(boundsABI(kind) == 1) { 5910 break 5911 } 5912 v.reset(OpLOONG64LoweredPanicBoundsB) 5913 v.AuxInt = int64ToAuxInt(kind) 5914 v.AddArg3(x, y, mem) 5915 return true 5916 } 5917 // match: (PanicBounds [kind] x y mem) 5918 // cond: boundsABI(kind) == 2 5919 // result: (LoweredPanicBoundsC [kind] x y mem) 5920 for { 5921 kind := auxIntToInt64(v.AuxInt) 5922 x := v_0 5923 y := v_1 5924 mem := v_2 5925 if !(boundsABI(kind) == 2) { 5926 break 5927 } 5928 v.reset(OpLOONG64LoweredPanicBoundsC) 5929 v.AuxInt = int64ToAuxInt(kind) 5930 v.AddArg3(x, y, mem) 5931 return true 5932 } 5933 return false 5934 } 5935 func rewriteValueLOONG64_OpRotateLeft16(v *Value) bool { 5936 v_1 := v.Args[1] 5937 v_0 := v.Args[0] 5938 b := v.Block 5939 typ := &b.Func.Config.Types 5940 // match: (RotateLeft16 <t> x (MOVVconst [c])) 5941 // result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15]))) 5942 for { 5943 t := v.Type 5944 x := v_0 5945 if v_1.Op != OpLOONG64MOVVconst { 5946 break 5947 } 5948 c := auxIntToInt64(v_1.AuxInt) 5949 v.reset(OpOr16) 5950 v0 := 
b.NewValue0(v.Pos, OpLsh16x64, t) 5951 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5952 v1.AuxInt = int64ToAuxInt(c & 15) 5953 v0.AddArg2(x, v1) 5954 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t) 5955 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 5956 v3.AuxInt = int64ToAuxInt(-c & 15) 5957 v2.AddArg2(x, v3) 5958 v.AddArg2(v0, v2) 5959 return true 5960 } 5961 return false 5962 } 5963 func rewriteValueLOONG64_OpRotateLeft32(v *Value) bool { 5964 v_1 := v.Args[1] 5965 v_0 := v.Args[0] 5966 b := v.Block 5967 // match: (RotateLeft32 x y) 5968 // result: (ROTR x (NEGV <y.Type> y)) 5969 for { 5970 x := v_0 5971 y := v_1 5972 v.reset(OpLOONG64ROTR) 5973 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type) 5974 v0.AddArg(y) 5975 v.AddArg2(x, v0) 5976 return true 5977 } 5978 } 5979 func rewriteValueLOONG64_OpRotateLeft64(v *Value) bool { 5980 v_1 := v.Args[1] 5981 v_0 := v.Args[0] 5982 b := v.Block 5983 // match: (RotateLeft64 x y) 5984 // result: (ROTRV x (NEGV <y.Type> y)) 5985 for { 5986 x := v_0 5987 y := v_1 5988 v.reset(OpLOONG64ROTRV) 5989 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, y.Type) 5990 v0.AddArg(y) 5991 v.AddArg2(x, v0) 5992 return true 5993 } 5994 } 5995 func rewriteValueLOONG64_OpRotateLeft8(v *Value) bool { 5996 v_1 := v.Args[1] 5997 v_0 := v.Args[0] 5998 b := v.Block 5999 typ := &b.Func.Config.Types 6000 // match: (RotateLeft8 <t> x (MOVVconst [c])) 6001 // result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7]))) 6002 for { 6003 t := v.Type 6004 x := v_0 6005 if v_1.Op != OpLOONG64MOVVconst { 6006 break 6007 } 6008 c := auxIntToInt64(v_1.AuxInt) 6009 v.reset(OpOr8) 6010 v0 := b.NewValue0(v.Pos, OpLsh8x64, t) 6011 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6012 v1.AuxInt = int64ToAuxInt(c & 7) 6013 v0.AddArg2(x, v1) 6014 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t) 6015 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6016 v3.AuxInt = int64ToAuxInt(-c & 7) 6017 v2.AddArg2(x, v3) 6018 v.AddArg2(v0, v2) 6019 return true 6020 } 6021 return false 6022 } 6023 func rewriteValueLOONG64_OpRsh16Ux16(v *Value) bool { 6024 v_1 := v.Args[1] 6025 v_0 := v.Args[0] 6026 b := v.Block 6027 typ := &b.Func.Config.Types 6028 // match: (Rsh16Ux16 <t> x y) 6029 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y))) 6030 for { 6031 t := v.Type 6032 x := v_0 6033 y := v_1 6034 v.reset(OpLOONG64AND) 6035 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6036 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6037 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6038 v2.AuxInt = int64ToAuxInt(64) 6039 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6040 v3.AddArg(y) 6041 v1.AddArg2(v2, v3) 6042 v0.AddArg(v1) 6043 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6044 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6045 v5.AddArg(x) 6046 v4.AddArg2(v5, v3) 6047 v.AddArg2(v0, v4) 6048 return true 6049 } 6050 } 6051 func rewriteValueLOONG64_OpRsh16Ux32(v *Value) bool { 6052 v_1 := v.Args[1] 6053 v_0 := v.Args[0] 6054 b := v.Block 6055 typ := &b.Func.Config.Types 6056 // match: (Rsh16Ux32 <t> x y) 6057 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y))) 6058 for { 6059 t := v.Type 6060 x := v_0 6061 y := v_1 6062 v.reset(OpLOONG64AND) 6063 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6064 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6065 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 
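// --- Illustrative aside (not part of the generated file) ---
// The RotateLeft rules above take two routes: 32- and 64-bit rotate-left maps
// onto the hardware rotate-right (ROTR/ROTRV) of the negated count, while 8-
// and 16-bit rotate-left by a constant c is rebuilt from two shifts,
// (x << (c&mask)) | (x >> (-c&mask)). Hypothetical plain-Go models:
func rotateLeft16Const(x uint16, c int64) uint16 {
	return x<<(uint(c)&15) | x>>(uint(-c)&15)
}

func rotateLeft64ViaROTRV(x, y uint64) uint64 {
	// ROTRV x (NEGV y): rotating right by (-y mod 64) is rotating left by (y mod 64).
	return x>>((-y)&63) | x<<(y&63)
}
// --- end aside ---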
6066 v2.AuxInt = int64ToAuxInt(64) 6067 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6068 v3.AddArg(y) 6069 v1.AddArg2(v2, v3) 6070 v0.AddArg(v1) 6071 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6072 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6073 v5.AddArg(x) 6074 v4.AddArg2(v5, v3) 6075 v.AddArg2(v0, v4) 6076 return true 6077 } 6078 } 6079 func rewriteValueLOONG64_OpRsh16Ux64(v *Value) bool { 6080 v_1 := v.Args[1] 6081 v_0 := v.Args[0] 6082 b := v.Block 6083 typ := &b.Func.Config.Types 6084 // match: (Rsh16Ux64 <t> x y) 6085 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt16to64 x) y)) 6086 for { 6087 t := v.Type 6088 x := v_0 6089 y := v_1 6090 v.reset(OpLOONG64AND) 6091 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6092 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6093 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6094 v2.AuxInt = int64ToAuxInt(64) 6095 v1.AddArg2(v2, y) 6096 v0.AddArg(v1) 6097 v3 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6098 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6099 v4.AddArg(x) 6100 v3.AddArg2(v4, y) 6101 v.AddArg2(v0, v3) 6102 return true 6103 } 6104 } 6105 func rewriteValueLOONG64_OpRsh16Ux8(v *Value) bool { 6106 v_1 := v.Args[1] 6107 v_0 := v.Args[0] 6108 b := v.Block 6109 typ := &b.Func.Config.Types 6110 // match: (Rsh16Ux8 <t> x y) 6111 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y))) 6112 for { 6113 t := v.Type 6114 x := v_0 6115 y := v_1 6116 v.reset(OpLOONG64AND) 6117 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6118 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6119 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6120 v2.AuxInt = int64ToAuxInt(64) 6121 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6122 v3.AddArg(y) 6123 v1.AddArg2(v2, v3) 6124 v0.AddArg(v1) 6125 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6126 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6127 v5.AddArg(x) 6128 v4.AddArg2(v5, v3) 6129 v.AddArg2(v0, v4) 6130 return true 6131 } 6132 } 6133 func rewriteValueLOONG64_OpRsh16x16(v *Value) bool { 6134 v_1 := v.Args[1] 6135 v_0 := v.Args[0] 6136 b := v.Block 6137 typ := &b.Func.Config.Types 6138 // match: (Rsh16x16 <t> x y) 6139 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 6140 for { 6141 t := v.Type 6142 x := v_0 6143 y := v_1 6144 v.reset(OpLOONG64SRAV) 6145 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6146 v0.AddArg(x) 6147 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6148 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6149 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6150 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6151 v4.AddArg(y) 6152 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6153 v5.AuxInt = int64ToAuxInt(63) 6154 v3.AddArg2(v4, v5) 6155 v2.AddArg(v3) 6156 v1.AddArg2(v2, v4) 6157 v.AddArg2(v0, v1) 6158 return true 6159 } 6160 } 6161 func rewriteValueLOONG64_OpRsh16x32(v *Value) bool { 6162 v_1 := v.Args[1] 6163 v_0 := v.Args[0] 6164 b := v.Block 6165 typ := &b.Func.Config.Types 6166 // match: (Rsh16x32 <t> x y) 6167 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 6168 for { 6169 t := v.Type 6170 x := v_0 6171 y := v_1 6172 v.reset(OpLOONG64SRAV) 6173 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6174 v0.AddArg(x) 6175 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 
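// --- Illustrative aside (not part of the generated file) ---
// The signed Rsh*x* rules above clamp the shift count instead of masking the
// result: OR(NEGV(SGTU(y, 63)), y) yields y when y <= 63 and all ones
// (effectively 63 once SRAV takes the low bits) when y > 63, so an oversized
// shift still fills with the sign bit. A hypothetical plain-Go model of
// (Rsh16x16 x y):
func rsh16x16ViaClamp(x int16, y uint16) int16 {
	x64 := int64(x)  // SignExt16to64 x
	y64 := uint64(y) // ZeroExt16to64 y
	if y64 > 63 {    // SGTU y 63 -> NEGV -> OR: clamp to 63
		y64 = 63
	}
	return int16(x64 >> y64) // SRAV
}
// --- end aside ---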
6176 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6177 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6178 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6179 v4.AddArg(y) 6180 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6181 v5.AuxInt = int64ToAuxInt(63) 6182 v3.AddArg2(v4, v5) 6183 v2.AddArg(v3) 6184 v1.AddArg2(v2, v4) 6185 v.AddArg2(v0, v1) 6186 return true 6187 } 6188 } 6189 func rewriteValueLOONG64_OpRsh16x64(v *Value) bool { 6190 v_1 := v.Args[1] 6191 v_0 := v.Args[0] 6192 b := v.Block 6193 typ := &b.Func.Config.Types 6194 // match: (Rsh16x64 <t> x y) 6195 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 6196 for { 6197 t := v.Type 6198 x := v_0 6199 y := v_1 6200 v.reset(OpLOONG64SRAV) 6201 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6202 v0.AddArg(x) 6203 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6204 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6205 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6206 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6207 v4.AuxInt = int64ToAuxInt(63) 6208 v3.AddArg2(y, v4) 6209 v2.AddArg(v3) 6210 v1.AddArg2(v2, y) 6211 v.AddArg2(v0, v1) 6212 return true 6213 } 6214 } 6215 func rewriteValueLOONG64_OpRsh16x8(v *Value) bool { 6216 v_1 := v.Args[1] 6217 v_0 := v.Args[0] 6218 b := v.Block 6219 typ := &b.Func.Config.Types 6220 // match: (Rsh16x8 <t> x y) 6221 // result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 6222 for { 6223 t := v.Type 6224 x := v_0 6225 y := v_1 6226 v.reset(OpLOONG64SRAV) 6227 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64) 6228 v0.AddArg(x) 6229 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6230 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6231 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6232 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6233 v4.AddArg(y) 6234 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6235 v5.AuxInt = int64ToAuxInt(63) 6236 v3.AddArg2(v4, v5) 6237 v2.AddArg(v3) 6238 v1.AddArg2(v2, v4) 6239 v.AddArg2(v0, v1) 6240 return true 6241 } 6242 } 6243 func rewriteValueLOONG64_OpRsh32Ux16(v *Value) bool { 6244 v_1 := v.Args[1] 6245 v_0 := v.Args[0] 6246 b := v.Block 6247 typ := &b.Func.Config.Types 6248 // match: (Rsh32Ux16 <t> x y) 6249 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y))) 6250 for { 6251 t := v.Type 6252 x := v_0 6253 y := v_1 6254 v.reset(OpLOONG64AND) 6255 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6256 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6257 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6258 v2.AuxInt = int64ToAuxInt(64) 6259 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6260 v3.AddArg(y) 6261 v1.AddArg2(v2, v3) 6262 v0.AddArg(v1) 6263 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6264 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6265 v5.AddArg(x) 6266 v4.AddArg2(v5, v3) 6267 v.AddArg2(v0, v4) 6268 return true 6269 } 6270 } 6271 func rewriteValueLOONG64_OpRsh32Ux32(v *Value) bool { 6272 v_1 := v.Args[1] 6273 v_0 := v.Args[0] 6274 b := v.Block 6275 typ := &b.Func.Config.Types 6276 // match: (Rsh32Ux32 <t> x y) 6277 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y))) 6278 for { 6279 t := v.Type 6280 x := v_0 6281 y := v_1 6282 v.reset(OpLOONG64AND) 6283 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6284 v1 := b.NewValue0(v.Pos, 
OpLOONG64SGTU, typ.Bool) 6285 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6286 v2.AuxInt = int64ToAuxInt(64) 6287 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6288 v3.AddArg(y) 6289 v1.AddArg2(v2, v3) 6290 v0.AddArg(v1) 6291 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6292 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6293 v5.AddArg(x) 6294 v4.AddArg2(v5, v3) 6295 v.AddArg2(v0, v4) 6296 return true 6297 } 6298 } 6299 func rewriteValueLOONG64_OpRsh32Ux64(v *Value) bool { 6300 v_1 := v.Args[1] 6301 v_0 := v.Args[0] 6302 b := v.Block 6303 typ := &b.Func.Config.Types 6304 // match: (Rsh32Ux64 <t> x y) 6305 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt32to64 x) y)) 6306 for { 6307 t := v.Type 6308 x := v_0 6309 y := v_1 6310 v.reset(OpLOONG64AND) 6311 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6312 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6313 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6314 v2.AuxInt = int64ToAuxInt(64) 6315 v1.AddArg2(v2, y) 6316 v0.AddArg(v1) 6317 v3 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6318 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6319 v4.AddArg(x) 6320 v3.AddArg2(v4, y) 6321 v.AddArg2(v0, v3) 6322 return true 6323 } 6324 } 6325 func rewriteValueLOONG64_OpRsh32Ux8(v *Value) bool { 6326 v_1 := v.Args[1] 6327 v_0 := v.Args[0] 6328 b := v.Block 6329 typ := &b.Func.Config.Types 6330 // match: (Rsh32Ux8 <t> x y) 6331 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y))) 6332 for { 6333 t := v.Type 6334 x := v_0 6335 y := v_1 6336 v.reset(OpLOONG64AND) 6337 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6338 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6339 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6340 v2.AuxInt = int64ToAuxInt(64) 6341 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6342 v3.AddArg(y) 6343 v1.AddArg2(v2, v3) 6344 v0.AddArg(v1) 6345 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6346 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6347 v5.AddArg(x) 6348 v4.AddArg2(v5, v3) 6349 v.AddArg2(v0, v4) 6350 return true 6351 } 6352 } 6353 func rewriteValueLOONG64_OpRsh32x16(v *Value) bool { 6354 v_1 := v.Args[1] 6355 v_0 := v.Args[0] 6356 b := v.Block 6357 typ := &b.Func.Config.Types 6358 // match: (Rsh32x16 <t> x y) 6359 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 6360 for { 6361 t := v.Type 6362 x := v_0 6363 y := v_1 6364 v.reset(OpLOONG64SRAV) 6365 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6366 v0.AddArg(x) 6367 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6368 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6369 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6370 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6371 v4.AddArg(y) 6372 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6373 v5.AuxInt = int64ToAuxInt(63) 6374 v3.AddArg2(v4, v5) 6375 v2.AddArg(v3) 6376 v1.AddArg2(v2, v4) 6377 v.AddArg2(v0, v1) 6378 return true 6379 } 6380 } 6381 func rewriteValueLOONG64_OpRsh32x32(v *Value) bool { 6382 v_1 := v.Args[1] 6383 v_0 := v.Args[0] 6384 b := v.Block 6385 typ := &b.Func.Config.Types 6386 // match: (Rsh32x32 <t> x y) 6387 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 6388 for { 6389 t := v.Type 6390 x := v_0 6391 y := v_1 6392 v.reset(OpLOONG64SRAV) 6393 v0 := b.NewValue0(v.Pos, 
OpSignExt32to64, typ.Int64) 6394 v0.AddArg(x) 6395 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6396 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6397 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6398 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6399 v4.AddArg(y) 6400 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6401 v5.AuxInt = int64ToAuxInt(63) 6402 v3.AddArg2(v4, v5) 6403 v2.AddArg(v3) 6404 v1.AddArg2(v2, v4) 6405 v.AddArg2(v0, v1) 6406 return true 6407 } 6408 } 6409 func rewriteValueLOONG64_OpRsh32x64(v *Value) bool { 6410 v_1 := v.Args[1] 6411 v_0 := v.Args[0] 6412 b := v.Block 6413 typ := &b.Func.Config.Types 6414 // match: (Rsh32x64 <t> x y) 6415 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 6416 for { 6417 t := v.Type 6418 x := v_0 6419 y := v_1 6420 v.reset(OpLOONG64SRAV) 6421 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6422 v0.AddArg(x) 6423 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6424 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6425 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6426 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6427 v4.AuxInt = int64ToAuxInt(63) 6428 v3.AddArg2(y, v4) 6429 v2.AddArg(v3) 6430 v1.AddArg2(v2, y) 6431 v.AddArg2(v0, v1) 6432 return true 6433 } 6434 } 6435 func rewriteValueLOONG64_OpRsh32x8(v *Value) bool { 6436 v_1 := v.Args[1] 6437 v_0 := v.Args[0] 6438 b := v.Block 6439 typ := &b.Func.Config.Types 6440 // match: (Rsh32x8 <t> x y) 6441 // result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 6442 for { 6443 t := v.Type 6444 x := v_0 6445 y := v_1 6446 v.reset(OpLOONG64SRAV) 6447 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64) 6448 v0.AddArg(x) 6449 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6450 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6451 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6452 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6453 v4.AddArg(y) 6454 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6455 v5.AuxInt = int64ToAuxInt(63) 6456 v3.AddArg2(v4, v5) 6457 v2.AddArg(v3) 6458 v1.AddArg2(v2, v4) 6459 v.AddArg2(v0, v1) 6460 return true 6461 } 6462 } 6463 func rewriteValueLOONG64_OpRsh64Ux16(v *Value) bool { 6464 v_1 := v.Args[1] 6465 v_0 := v.Args[0] 6466 b := v.Block 6467 typ := &b.Func.Config.Types 6468 // match: (Rsh64Ux16 <t> x y) 6469 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y))) 6470 for { 6471 t := v.Type 6472 x := v_0 6473 y := v_1 6474 v.reset(OpLOONG64AND) 6475 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6476 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6477 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6478 v2.AuxInt = int64ToAuxInt(64) 6479 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6480 v3.AddArg(y) 6481 v1.AddArg2(v2, v3) 6482 v0.AddArg(v1) 6483 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6484 v4.AddArg2(x, v3) 6485 v.AddArg2(v0, v4) 6486 return true 6487 } 6488 } 6489 func rewriteValueLOONG64_OpRsh64Ux32(v *Value) bool { 6490 v_1 := v.Args[1] 6491 v_0 := v.Args[0] 6492 b := v.Block 6493 typ := &b.Func.Config.Types 6494 // match: (Rsh64Ux32 <t> x y) 6495 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y))) 6496 for { 6497 t := v.Type 6498 x := v_0 6499 y := v_1 6500 v.reset(OpLOONG64AND) 6501 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6502 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 
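// --- Illustrative aside (not part of the generated file) ---
// The unsigned Rsh*Ux* rules reuse the same NEGV(SGTU(64, y)) mask as the
// left shifts, but first zero-extend the shifted value so no stale high bits
// enter the 64-bit SRLV. A hypothetical plain-Go model of (Rsh32Ux16 x y):
func rsh32Ux16ViaMask(x uint32, y uint16) uint32 {
	x64 := uint64(x) // ZeroExt32to64 x
	y64 := uint64(y) // ZeroExt16to64 y
	mask := uint64(0)
	if 64 > y64 { // SGTU 64 y -> NEGV
		mask = ^uint64(0)
	}
	return uint32(mask & (x64 >> (y64 & 63))) // AND (SRLV ...)
}
// --- end aside ---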
6503 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6504 v2.AuxInt = int64ToAuxInt(64) 6505 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6506 v3.AddArg(y) 6507 v1.AddArg2(v2, v3) 6508 v0.AddArg(v1) 6509 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6510 v4.AddArg2(x, v3) 6511 v.AddArg2(v0, v4) 6512 return true 6513 } 6514 } 6515 func rewriteValueLOONG64_OpRsh64Ux64(v *Value) bool { 6516 v_1 := v.Args[1] 6517 v_0 := v.Args[0] 6518 b := v.Block 6519 typ := &b.Func.Config.Types 6520 // match: (Rsh64Ux64 <t> x y) 6521 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> x y)) 6522 for { 6523 t := v.Type 6524 x := v_0 6525 y := v_1 6526 v.reset(OpLOONG64AND) 6527 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6528 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6529 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6530 v2.AuxInt = int64ToAuxInt(64) 6531 v1.AddArg2(v2, y) 6532 v0.AddArg(v1) 6533 v3 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6534 v3.AddArg2(x, y) 6535 v.AddArg2(v0, v3) 6536 return true 6537 } 6538 } 6539 func rewriteValueLOONG64_OpRsh64Ux8(v *Value) bool { 6540 v_1 := v.Args[1] 6541 v_0 := v.Args[0] 6542 b := v.Block 6543 typ := &b.Func.Config.Types 6544 // match: (Rsh64Ux8 <t> x y) 6545 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y))) 6546 for { 6547 t := v.Type 6548 x := v_0 6549 y := v_1 6550 v.reset(OpLOONG64AND) 6551 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6552 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6553 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6554 v2.AuxInt = int64ToAuxInt(64) 6555 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6556 v3.AddArg(y) 6557 v1.AddArg2(v2, v3) 6558 v0.AddArg(v1) 6559 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6560 v4.AddArg2(x, v3) 6561 v.AddArg2(v0, v4) 6562 return true 6563 } 6564 } 6565 func rewriteValueLOONG64_OpRsh64x16(v *Value) bool { 6566 v_1 := v.Args[1] 6567 v_0 := v.Args[0] 6568 b := v.Block 6569 typ := &b.Func.Config.Types 6570 // match: (Rsh64x16 <t> x y) 6571 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 6572 for { 6573 t := v.Type 6574 x := v_0 6575 y := v_1 6576 v.reset(OpLOONG64SRAV) 6577 v0 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6578 v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6579 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6580 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6581 v3.AddArg(y) 6582 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6583 v4.AuxInt = int64ToAuxInt(63) 6584 v2.AddArg2(v3, v4) 6585 v1.AddArg(v2) 6586 v0.AddArg2(v1, v3) 6587 v.AddArg2(x, v0) 6588 return true 6589 } 6590 } 6591 func rewriteValueLOONG64_OpRsh64x32(v *Value) bool { 6592 v_1 := v.Args[1] 6593 v_0 := v.Args[0] 6594 b := v.Block 6595 typ := &b.Func.Config.Types 6596 // match: (Rsh64x32 <t> x y) 6597 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 6598 for { 6599 t := v.Type 6600 x := v_0 6601 y := v_1 6602 v.reset(OpLOONG64SRAV) 6603 v0 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6604 v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6605 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6606 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6607 v3.AddArg(y) 6608 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6609 v4.AuxInt = int64ToAuxInt(63) 6610 v2.AddArg2(v3, v4) 6611 v1.AddArg(v2) 6612 v0.AddArg2(v1, v3) 6613 v.AddArg2(x, v0) 6614 return true 6615 } 
6616 } 6617 func rewriteValueLOONG64_OpRsh64x64(v *Value) bool { 6618 v_1 := v.Args[1] 6619 v_0 := v.Args[0] 6620 b := v.Block 6621 typ := &b.Func.Config.Types 6622 // match: (Rsh64x64 <t> x y) 6623 // result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 6624 for { 6625 t := v.Type 6626 x := v_0 6627 y := v_1 6628 v.reset(OpLOONG64SRAV) 6629 v0 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6630 v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6631 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6632 v3 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6633 v3.AuxInt = int64ToAuxInt(63) 6634 v2.AddArg2(y, v3) 6635 v1.AddArg(v2) 6636 v0.AddArg2(v1, y) 6637 v.AddArg2(x, v0) 6638 return true 6639 } 6640 } 6641 func rewriteValueLOONG64_OpRsh64x8(v *Value) bool { 6642 v_1 := v.Args[1] 6643 v_0 := v.Args[0] 6644 b := v.Block 6645 typ := &b.Func.Config.Types 6646 // match: (Rsh64x8 <t> x y) 6647 // result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 6648 for { 6649 t := v.Type 6650 x := v_0 6651 y := v_1 6652 v.reset(OpLOONG64SRAV) 6653 v0 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6654 v1 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6655 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6656 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6657 v3.AddArg(y) 6658 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6659 v4.AuxInt = int64ToAuxInt(63) 6660 v2.AddArg2(v3, v4) 6661 v1.AddArg(v2) 6662 v0.AddArg2(v1, v3) 6663 v.AddArg2(x, v0) 6664 return true 6665 } 6666 } 6667 func rewriteValueLOONG64_OpRsh8Ux16(v *Value) bool { 6668 v_1 := v.Args[1] 6669 v_0 := v.Args[0] 6670 b := v.Block 6671 typ := &b.Func.Config.Types 6672 // match: (Rsh8Ux16 <t> x y) 6673 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y))) 6674 for { 6675 t := v.Type 6676 x := v_0 6677 y := v_1 6678 v.reset(OpLOONG64AND) 6679 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6680 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6681 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6682 v2.AuxInt = int64ToAuxInt(64) 6683 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6684 v3.AddArg(y) 6685 v1.AddArg2(v2, v3) 6686 v0.AddArg(v1) 6687 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6688 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6689 v5.AddArg(x) 6690 v4.AddArg2(v5, v3) 6691 v.AddArg2(v0, v4) 6692 return true 6693 } 6694 } 6695 func rewriteValueLOONG64_OpRsh8Ux32(v *Value) bool { 6696 v_1 := v.Args[1] 6697 v_0 := v.Args[0] 6698 b := v.Block 6699 typ := &b.Func.Config.Types 6700 // match: (Rsh8Ux32 <t> x y) 6701 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y))) 6702 for { 6703 t := v.Type 6704 x := v_0 6705 y := v_1 6706 v.reset(OpLOONG64AND) 6707 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6708 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6709 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6710 v2.AuxInt = int64ToAuxInt(64) 6711 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6712 v3.AddArg(y) 6713 v1.AddArg2(v2, v3) 6714 v0.AddArg(v1) 6715 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6716 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6717 v5.AddArg(x) 6718 v4.AddArg2(v5, v3) 6719 v.AddArg2(v0, v4) 6720 return true 6721 } 6722 } 6723 func rewriteValueLOONG64_OpRsh8Ux64(v *Value) bool { 6724 v_1 := v.Args[1] 6725 v_0 := v.Args[0] 6726 b := v.Block 6727 typ := 
&b.Func.Config.Types 6728 // match: (Rsh8Ux64 <t> x y) 6729 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) y)) (SRLV <t> (ZeroExt8to64 x) y)) 6730 for { 6731 t := v.Type 6732 x := v_0 6733 y := v_1 6734 v.reset(OpLOONG64AND) 6735 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6736 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6737 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6738 v2.AuxInt = int64ToAuxInt(64) 6739 v1.AddArg2(v2, y) 6740 v0.AddArg(v1) 6741 v3 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6742 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6743 v4.AddArg(x) 6744 v3.AddArg2(v4, y) 6745 v.AddArg2(v0, v3) 6746 return true 6747 } 6748 } 6749 func rewriteValueLOONG64_OpRsh8Ux8(v *Value) bool { 6750 v_1 := v.Args[1] 6751 v_0 := v.Args[0] 6752 b := v.Block 6753 typ := &b.Func.Config.Types 6754 // match: (Rsh8Ux8 <t> x y) 6755 // result: (AND (NEGV <t> (SGTU (MOVVconst <typ.UInt64> [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y))) 6756 for { 6757 t := v.Type 6758 x := v_0 6759 y := v_1 6760 v.reset(OpLOONG64AND) 6761 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6762 v1 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6763 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6764 v2.AuxInt = int64ToAuxInt(64) 6765 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6766 v3.AddArg(y) 6767 v1.AddArg2(v2, v3) 6768 v0.AddArg(v1) 6769 v4 := b.NewValue0(v.Pos, OpLOONG64SRLV, t) 6770 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6771 v5.AddArg(x) 6772 v4.AddArg2(v5, v3) 6773 v.AddArg2(v0, v4) 6774 return true 6775 } 6776 } 6777 func rewriteValueLOONG64_OpRsh8x16(v *Value) bool { 6778 v_1 := v.Args[1] 6779 v_0 := v.Args[0] 6780 b := v.Block 6781 typ := &b.Func.Config.Types 6782 // match: (Rsh8x16 <t> x y) 6783 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt16to64 y))) 6784 for { 6785 t := v.Type 6786 x := v_0 6787 y := v_1 6788 v.reset(OpLOONG64SRAV) 6789 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6790 v0.AddArg(x) 6791 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6792 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6793 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6794 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64) 6795 v4.AddArg(y) 6796 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6797 v5.AuxInt = int64ToAuxInt(63) 6798 v3.AddArg2(v4, v5) 6799 v2.AddArg(v3) 6800 v1.AddArg2(v2, v4) 6801 v.AddArg2(v0, v1) 6802 return true 6803 } 6804 } 6805 func rewriteValueLOONG64_OpRsh8x32(v *Value) bool { 6806 v_1 := v.Args[1] 6807 v_0 := v.Args[0] 6808 b := v.Block 6809 typ := &b.Func.Config.Types 6810 // match: (Rsh8x32 <t> x y) 6811 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt32to64 y))) 6812 for { 6813 t := v.Type 6814 x := v_0 6815 y := v_1 6816 v.reset(OpLOONG64SRAV) 6817 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6818 v0.AddArg(x) 6819 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6820 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6821 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6822 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64) 6823 v4.AddArg(y) 6824 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6825 v5.AuxInt = int64ToAuxInt(63) 6826 v3.AddArg2(v4, v5) 6827 v2.AddArg(v3) 6828 v1.AddArg2(v2, v4) 6829 v.AddArg2(v0, v1) 6830 return true 6831 } 6832 } 6833 func rewriteValueLOONG64_OpRsh8x64(v *Value) bool { 6834 v_1 := v.Args[1] 6835 v_0 := v.Args[0] 6836 b 
:= v.Block 6837 typ := &b.Func.Config.Types 6838 // match: (Rsh8x64 <t> x y) 6839 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst <typ.UInt64> [63]))) y)) 6840 for { 6841 t := v.Type 6842 x := v_0 6843 y := v_1 6844 v.reset(OpLOONG64SRAV) 6845 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6846 v0.AddArg(x) 6847 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6848 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6849 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6850 v4 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6851 v4.AuxInt = int64ToAuxInt(63) 6852 v3.AddArg2(y, v4) 6853 v2.AddArg(v3) 6854 v1.AddArg2(v2, y) 6855 v.AddArg2(v0, v1) 6856 return true 6857 } 6858 } 6859 func rewriteValueLOONG64_OpRsh8x8(v *Value) bool { 6860 v_1 := v.Args[1] 6861 v_0 := v.Args[0] 6862 b := v.Block 6863 typ := &b.Func.Config.Types 6864 // match: (Rsh8x8 <t> x y) 6865 // result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst <typ.UInt64> [63]))) (ZeroExt8to64 y))) 6866 for { 6867 t := v.Type 6868 x := v_0 6869 y := v_1 6870 v.reset(OpLOONG64SRAV) 6871 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64) 6872 v0.AddArg(x) 6873 v1 := b.NewValue0(v.Pos, OpLOONG64OR, t) 6874 v2 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 6875 v3 := b.NewValue0(v.Pos, OpLOONG64SGTU, typ.Bool) 6876 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64) 6877 v4.AddArg(y) 6878 v5 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 6879 v5.AuxInt = int64ToAuxInt(63) 6880 v3.AddArg2(v4, v5) 6881 v2.AddArg(v3) 6882 v1.AddArg2(v2, v4) 6883 v.AddArg2(v0, v1) 6884 return true 6885 } 6886 } 6887 func rewriteValueLOONG64_OpSelect0(v *Value) bool { 6888 v_0 := v.Args[0] 6889 b := v.Block 6890 typ := &b.Func.Config.Types 6891 // match: (Select0 (Mul64uover x y)) 6892 // result: (Select1 <typ.UInt64> (MULVU x y)) 6893 for { 6894 if v_0.Op != OpMul64uover { 6895 break 6896 } 6897 y := v_0.Args[1] 6898 x := v_0.Args[0] 6899 v.reset(OpSelect1) 6900 v.Type = typ.UInt64 6901 v0 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 6902 v0.AddArg2(x, y) 6903 v.AddArg(v0) 6904 return true 6905 } 6906 // match: (Select0 <t> (Add64carry x y c)) 6907 // result: (ADDV (ADDV <t> x y) c) 6908 for { 6909 t := v.Type 6910 if v_0.Op != OpAdd64carry { 6911 break 6912 } 6913 c := v_0.Args[2] 6914 x := v_0.Args[0] 6915 y := v_0.Args[1] 6916 v.reset(OpLOONG64ADDV) 6917 v0 := b.NewValue0(v.Pos, OpLOONG64ADDV, t) 6918 v0.AddArg2(x, y) 6919 v.AddArg2(v0, c) 6920 return true 6921 } 6922 // match: (Select0 <t> (Sub64borrow x y c)) 6923 // result: (SUBV (SUBV <t> x y) c) 6924 for { 6925 t := v.Type 6926 if v_0.Op != OpSub64borrow { 6927 break 6928 } 6929 c := v_0.Args[2] 6930 x := v_0.Args[0] 6931 y := v_0.Args[1] 6932 v.reset(OpLOONG64SUBV) 6933 v0 := b.NewValue0(v.Pos, OpLOONG64SUBV, t) 6934 v0.AddArg2(x, y) 6935 v.AddArg2(v0, c) 6936 return true 6937 } 6938 // match: (Select0 (DIVVU _ (MOVVconst [1]))) 6939 // result: (MOVVconst [0]) 6940 for { 6941 if v_0.Op != OpLOONG64DIVVU { 6942 break 6943 } 6944 _ = v_0.Args[1] 6945 v_0_1 := v_0.Args[1] 6946 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 { 6947 break 6948 } 6949 v.reset(OpLOONG64MOVVconst) 6950 v.AuxInt = int64ToAuxInt(0) 6951 return true 6952 } 6953 // match: (Select0 (DIVVU x (MOVVconst [c]))) 6954 // cond: isPowerOfTwo64(c) 6955 // result: (ANDconst [c-1] x) 6956 for { 6957 if v_0.Op != OpLOONG64DIVVU { 6958 break 6959 } 6960 _ = v_0.Args[1] 6961 x := v_0.Args[0] 6962 v_0_1 := v_0.Args[1] 6963 if v_0_1.Op != 
OpLOONG64MOVVconst { 6964 break 6965 } 6966 c := auxIntToInt64(v_0_1.AuxInt) 6967 if !(isPowerOfTwo64(c)) { 6968 break 6969 } 6970 v.reset(OpLOONG64ANDconst) 6971 v.AuxInt = int64ToAuxInt(c - 1) 6972 v.AddArg(x) 6973 return true 6974 } 6975 // match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 6976 // cond: d != 0 6977 // result: (MOVVconst [c%d]) 6978 for { 6979 if v_0.Op != OpLOONG64DIVV { 6980 break 6981 } 6982 _ = v_0.Args[1] 6983 v_0_0 := v_0.Args[0] 6984 if v_0_0.Op != OpLOONG64MOVVconst { 6985 break 6986 } 6987 c := auxIntToInt64(v_0_0.AuxInt) 6988 v_0_1 := v_0.Args[1] 6989 if v_0_1.Op != OpLOONG64MOVVconst { 6990 break 6991 } 6992 d := auxIntToInt64(v_0_1.AuxInt) 6993 if !(d != 0) { 6994 break 6995 } 6996 v.reset(OpLOONG64MOVVconst) 6997 v.AuxInt = int64ToAuxInt(c % d) 6998 return true 6999 } 7000 // match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 7001 // cond: d != 0 7002 // result: (MOVVconst [int64(uint64(c)%uint64(d))]) 7003 for { 7004 if v_0.Op != OpLOONG64DIVVU { 7005 break 7006 } 7007 _ = v_0.Args[1] 7008 v_0_0 := v_0.Args[0] 7009 if v_0_0.Op != OpLOONG64MOVVconst { 7010 break 7011 } 7012 c := auxIntToInt64(v_0_0.AuxInt) 7013 v_0_1 := v_0.Args[1] 7014 if v_0_1.Op != OpLOONG64MOVVconst { 7015 break 7016 } 7017 d := auxIntToInt64(v_0_1.AuxInt) 7018 if !(d != 0) { 7019 break 7020 } 7021 v.reset(OpLOONG64MOVVconst) 7022 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d))) 7023 return true 7024 } 7025 return false 7026 } 7027 func rewriteValueLOONG64_OpSelect1(v *Value) bool { 7028 v_0 := v.Args[0] 7029 b := v.Block 7030 typ := &b.Func.Config.Types 7031 // match: (Select1 (Mul64uover x y)) 7032 // result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst <typ.UInt64> [0])) 7033 for { 7034 if v_0.Op != OpMul64uover { 7035 break 7036 } 7037 y := v_0.Args[1] 7038 x := v_0.Args[0] 7039 v.reset(OpLOONG64SGTU) 7040 v.Type = typ.Bool 7041 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64) 7042 v1 := b.NewValue0(v.Pos, OpLOONG64MULVU, types.NewTuple(typ.UInt64, typ.UInt64)) 7043 v1.AddArg2(x, y) 7044 v0.AddArg(v1) 7045 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7046 v2.AuxInt = int64ToAuxInt(0) 7047 v.AddArg2(v0, v2) 7048 return true 7049 } 7050 // match: (Select1 <t> (Add64carry x y c)) 7051 // result: (OR (SGTU <t> x s:(ADDV <t> x y)) (SGTU <t> s (ADDV <t> s c))) 7052 for { 7053 t := v.Type 7054 if v_0.Op != OpAdd64carry { 7055 break 7056 } 7057 c := v_0.Args[2] 7058 x := v_0.Args[0] 7059 y := v_0.Args[1] 7060 v.reset(OpLOONG64OR) 7061 v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t) 7062 s := b.NewValue0(v.Pos, OpLOONG64ADDV, t) 7063 s.AddArg2(x, y) 7064 v0.AddArg2(x, s) 7065 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t) 7066 v3 := b.NewValue0(v.Pos, OpLOONG64ADDV, t) 7067 v3.AddArg2(s, c) 7068 v2.AddArg2(s, v3) 7069 v.AddArg2(v0, v2) 7070 return true 7071 } 7072 // match: (Select1 <t> (Sub64borrow x y c)) 7073 // result: (OR (SGTU <t> s:(SUBV <t> x y) x) (SGTU <t> (SUBV <t> s c) s)) 7074 for { 7075 t := v.Type 7076 if v_0.Op != OpSub64borrow { 7077 break 7078 } 7079 c := v_0.Args[2] 7080 x := v_0.Args[0] 7081 y := v_0.Args[1] 7082 v.reset(OpLOONG64OR) 7083 v0 := b.NewValue0(v.Pos, OpLOONG64SGTU, t) 7084 s := b.NewValue0(v.Pos, OpLOONG64SUBV, t) 7085 s.AddArg2(x, y) 7086 v0.AddArg2(s, x) 7087 v2 := b.NewValue0(v.Pos, OpLOONG64SGTU, t) 7088 v3 := b.NewValue0(v.Pos, OpLOONG64SUBV, t) 7089 v3.AddArg2(s, c) 7090 v2.AddArg2(v3, s) 7091 v.AddArg2(v0, v2) 7092 return true 7093 } 7094 // match: (Select1 (MULVU x (MOVVconst [-1]))) 7095 // result: (NEGV x) 7096 
for { 7097 if v_0.Op != OpLOONG64MULVU { 7098 break 7099 } 7100 _ = v_0.Args[1] 7101 v_0_0 := v_0.Args[0] 7102 v_0_1 := v_0.Args[1] 7103 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 7104 x := v_0_0 7105 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 { 7106 continue 7107 } 7108 v.reset(OpLOONG64NEGV) 7109 v.AddArg(x) 7110 return true 7111 } 7112 break 7113 } 7114 // match: (Select1 (MULVU _ (MOVVconst [0]))) 7115 // result: (MOVVconst [0]) 7116 for { 7117 if v_0.Op != OpLOONG64MULVU { 7118 break 7119 } 7120 _ = v_0.Args[1] 7121 v_0_0 := v_0.Args[0] 7122 v_0_1 := v_0.Args[1] 7123 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 7124 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 { 7125 continue 7126 } 7127 v.reset(OpLOONG64MOVVconst) 7128 v.AuxInt = int64ToAuxInt(0) 7129 return true 7130 } 7131 break 7132 } 7133 // match: (Select1 (MULVU x (MOVVconst [1]))) 7134 // result: x 7135 for { 7136 if v_0.Op != OpLOONG64MULVU { 7137 break 7138 } 7139 _ = v_0.Args[1] 7140 v_0_0 := v_0.Args[0] 7141 v_0_1 := v_0.Args[1] 7142 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 7143 x := v_0_0 7144 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 { 7145 continue 7146 } 7147 v.copyOf(x) 7148 return true 7149 } 7150 break 7151 } 7152 // match: (Select1 (MULVU x (MOVVconst [c]))) 7153 // cond: isPowerOfTwo64(c) 7154 // result: (SLLVconst [log64(c)] x) 7155 for { 7156 if v_0.Op != OpLOONG64MULVU { 7157 break 7158 } 7159 _ = v_0.Args[1] 7160 v_0_0 := v_0.Args[0] 7161 v_0_1 := v_0.Args[1] 7162 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 7163 x := v_0_0 7164 if v_0_1.Op != OpLOONG64MOVVconst { 7165 continue 7166 } 7167 c := auxIntToInt64(v_0_1.AuxInt) 7168 if !(isPowerOfTwo64(c)) { 7169 continue 7170 } 7171 v.reset(OpLOONG64SLLVconst) 7172 v.AuxInt = int64ToAuxInt(log64(c)) 7173 v.AddArg(x) 7174 return true 7175 } 7176 break 7177 } 7178 // match: (Select1 (DIVVU x (MOVVconst [1]))) 7179 // result: x 7180 for { 7181 if v_0.Op != OpLOONG64DIVVU { 7182 break 7183 } 7184 _ = v_0.Args[1] 7185 x := v_0.Args[0] 7186 v_0_1 := v_0.Args[1] 7187 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 { 7188 break 7189 } 7190 v.copyOf(x) 7191 return true 7192 } 7193 // match: (Select1 (DIVVU x (MOVVconst [c]))) 7194 // cond: isPowerOfTwo64(c) 7195 // result: (SRLVconst [log64(c)] x) 7196 for { 7197 if v_0.Op != OpLOONG64DIVVU { 7198 break 7199 } 7200 _ = v_0.Args[1] 7201 x := v_0.Args[0] 7202 v_0_1 := v_0.Args[1] 7203 if v_0_1.Op != OpLOONG64MOVVconst { 7204 break 7205 } 7206 c := auxIntToInt64(v_0_1.AuxInt) 7207 if !(isPowerOfTwo64(c)) { 7208 break 7209 } 7210 v.reset(OpLOONG64SRLVconst) 7211 v.AuxInt = int64ToAuxInt(log64(c)) 7212 v.AddArg(x) 7213 return true 7214 } 7215 // match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d]))) 7216 // result: (MOVVconst [c*d]) 7217 for { 7218 if v_0.Op != OpLOONG64MULVU { 7219 break 7220 } 7221 _ = v_0.Args[1] 7222 v_0_0 := v_0.Args[0] 7223 v_0_1 := v_0.Args[1] 7224 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 { 7225 if v_0_0.Op != OpLOONG64MOVVconst { 7226 continue 7227 } 7228 c := auxIntToInt64(v_0_0.AuxInt) 7229 if v_0_1.Op != OpLOONG64MOVVconst { 7230 continue 7231 } 7232 d := auxIntToInt64(v_0_1.AuxInt) 7233 v.reset(OpLOONG64MOVVconst) 7234 v.AuxInt = int64ToAuxInt(c * d) 7235 return true 7236 } 7237 break 7238 } 7239 // match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d]))) 7240 // cond: d != 0 7241 // 
result: (MOVVconst [c/d]) 7242 for { 7243 if v_0.Op != OpLOONG64DIVV { 7244 break 7245 } 7246 _ = v_0.Args[1] 7247 v_0_0 := v_0.Args[0] 7248 if v_0_0.Op != OpLOONG64MOVVconst { 7249 break 7250 } 7251 c := auxIntToInt64(v_0_0.AuxInt) 7252 v_0_1 := v_0.Args[1] 7253 if v_0_1.Op != OpLOONG64MOVVconst { 7254 break 7255 } 7256 d := auxIntToInt64(v_0_1.AuxInt) 7257 if !(d != 0) { 7258 break 7259 } 7260 v.reset(OpLOONG64MOVVconst) 7261 v.AuxInt = int64ToAuxInt(c / d) 7262 return true 7263 } 7264 // match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d]))) 7265 // cond: d != 0 7266 // result: (MOVVconst [int64(uint64(c)/uint64(d))]) 7267 for { 7268 if v_0.Op != OpLOONG64DIVVU { 7269 break 7270 } 7271 _ = v_0.Args[1] 7272 v_0_0 := v_0.Args[0] 7273 if v_0_0.Op != OpLOONG64MOVVconst { 7274 break 7275 } 7276 c := auxIntToInt64(v_0_0.AuxInt) 7277 v_0_1 := v_0.Args[1] 7278 if v_0_1.Op != OpLOONG64MOVVconst { 7279 break 7280 } 7281 d := auxIntToInt64(v_0_1.AuxInt) 7282 if !(d != 0) { 7283 break 7284 } 7285 v.reset(OpLOONG64MOVVconst) 7286 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d))) 7287 return true 7288 } 7289 return false 7290 } 7291 func rewriteValueLOONG64_OpSlicemask(v *Value) bool { 7292 v_0 := v.Args[0] 7293 b := v.Block 7294 // match: (Slicemask <t> x) 7295 // result: (SRAVconst (NEGV <t> x) [63]) 7296 for { 7297 t := v.Type 7298 x := v_0 7299 v.reset(OpLOONG64SRAVconst) 7300 v.AuxInt = int64ToAuxInt(63) 7301 v0 := b.NewValue0(v.Pos, OpLOONG64NEGV, t) 7302 v0.AddArg(x) 7303 v.AddArg(v0) 7304 return true 7305 } 7306 } 7307 func rewriteValueLOONG64_OpStore(v *Value) bool { 7308 v_2 := v.Args[2] 7309 v_1 := v.Args[1] 7310 v_0 := v.Args[0] 7311 // match: (Store {t} ptr val mem) 7312 // cond: t.Size() == 1 7313 // result: (MOVBstore ptr val mem) 7314 for { 7315 t := auxToType(v.Aux) 7316 ptr := v_0 7317 val := v_1 7318 mem := v_2 7319 if !(t.Size() == 1) { 7320 break 7321 } 7322 v.reset(OpLOONG64MOVBstore) 7323 v.AddArg3(ptr, val, mem) 7324 return true 7325 } 7326 // match: (Store {t} ptr val mem) 7327 // cond: t.Size() == 2 7328 // result: (MOVHstore ptr val mem) 7329 for { 7330 t := auxToType(v.Aux) 7331 ptr := v_0 7332 val := v_1 7333 mem := v_2 7334 if !(t.Size() == 2) { 7335 break 7336 } 7337 v.reset(OpLOONG64MOVHstore) 7338 v.AddArg3(ptr, val, mem) 7339 return true 7340 } 7341 // match: (Store {t} ptr val mem) 7342 // cond: t.Size() == 4 && !is32BitFloat(val.Type) 7343 // result: (MOVWstore ptr val mem) 7344 for { 7345 t := auxToType(v.Aux) 7346 ptr := v_0 7347 val := v_1 7348 mem := v_2 7349 if !(t.Size() == 4 && !is32BitFloat(val.Type)) { 7350 break 7351 } 7352 v.reset(OpLOONG64MOVWstore) 7353 v.AddArg3(ptr, val, mem) 7354 return true 7355 } 7356 // match: (Store {t} ptr val mem) 7357 // cond: t.Size() == 8 && !is64BitFloat(val.Type) 7358 // result: (MOVVstore ptr val mem) 7359 for { 7360 t := auxToType(v.Aux) 7361 ptr := v_0 7362 val := v_1 7363 mem := v_2 7364 if !(t.Size() == 8 && !is64BitFloat(val.Type)) { 7365 break 7366 } 7367 v.reset(OpLOONG64MOVVstore) 7368 v.AddArg3(ptr, val, mem) 7369 return true 7370 } 7371 // match: (Store {t} ptr val mem) 7372 // cond: t.Size() == 4 && is32BitFloat(val.Type) 7373 // result: (MOVFstore ptr val mem) 7374 for { 7375 t := auxToType(v.Aux) 7376 ptr := v_0 7377 val := v_1 7378 mem := v_2 7379 if !(t.Size() == 4 && is32BitFloat(val.Type)) { 7380 break 7381 } 7382 v.reset(OpLOONG64MOVFstore) 7383 v.AddArg3(ptr, val, mem) 7384 return true 7385 } 7386 // match: (Store {t} ptr val mem) 7387 // cond: t.Size() == 8 && is64BitFloat(val.Type) 7388 // 
result: (MOVDstore ptr val mem) 7389 for { 7390 t := auxToType(v.Aux) 7391 ptr := v_0 7392 val := v_1 7393 mem := v_2 7394 if !(t.Size() == 8 && is64BitFloat(val.Type)) { 7395 break 7396 } 7397 v.reset(OpLOONG64MOVDstore) 7398 v.AddArg3(ptr, val, mem) 7399 return true 7400 } 7401 return false 7402 } 7403 func rewriteValueLOONG64_OpZero(v *Value) bool { 7404 v_1 := v.Args[1] 7405 v_0 := v.Args[0] 7406 b := v.Block 7407 config := b.Func.Config 7408 typ := &b.Func.Config.Types 7409 // match: (Zero [0] _ mem) 7410 // result: mem 7411 for { 7412 if auxIntToInt64(v.AuxInt) != 0 { 7413 break 7414 } 7415 mem := v_1 7416 v.copyOf(mem) 7417 return true 7418 } 7419 // match: (Zero [1] ptr mem) 7420 // result: (MOVBstore ptr (MOVVconst [0]) mem) 7421 for { 7422 if auxIntToInt64(v.AuxInt) != 1 { 7423 break 7424 } 7425 ptr := v_0 7426 mem := v_1 7427 v.reset(OpLOONG64MOVBstore) 7428 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7429 v0.AuxInt = int64ToAuxInt(0) 7430 v.AddArg3(ptr, v0, mem) 7431 return true 7432 } 7433 // match: (Zero [2] {t} ptr mem) 7434 // cond: t.Alignment()%2 == 0 7435 // result: (MOVHstore ptr (MOVVconst [0]) mem) 7436 for { 7437 if auxIntToInt64(v.AuxInt) != 2 { 7438 break 7439 } 7440 t := auxToType(v.Aux) 7441 ptr := v_0 7442 mem := v_1 7443 if !(t.Alignment()%2 == 0) { 7444 break 7445 } 7446 v.reset(OpLOONG64MOVHstore) 7447 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7448 v0.AuxInt = int64ToAuxInt(0) 7449 v.AddArg3(ptr, v0, mem) 7450 return true 7451 } 7452 // match: (Zero [2] ptr mem) 7453 // result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)) 7454 for { 7455 if auxIntToInt64(v.AuxInt) != 2 { 7456 break 7457 } 7458 ptr := v_0 7459 mem := v_1 7460 v.reset(OpLOONG64MOVBstore) 7461 v.AuxInt = int32ToAuxInt(1) 7462 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7463 v0.AuxInt = int64ToAuxInt(0) 7464 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7465 v1.AuxInt = int32ToAuxInt(0) 7466 v1.AddArg3(ptr, v0, mem) 7467 v.AddArg3(ptr, v0, v1) 7468 return true 7469 } 7470 // match: (Zero [4] {t} ptr mem) 7471 // cond: t.Alignment()%4 == 0 7472 // result: (MOVWstore ptr (MOVVconst [0]) mem) 7473 for { 7474 if auxIntToInt64(v.AuxInt) != 4 { 7475 break 7476 } 7477 t := auxToType(v.Aux) 7478 ptr := v_0 7479 mem := v_1 7480 if !(t.Alignment()%4 == 0) { 7481 break 7482 } 7483 v.reset(OpLOONG64MOVWstore) 7484 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7485 v0.AuxInt = int64ToAuxInt(0) 7486 v.AddArg3(ptr, v0, mem) 7487 return true 7488 } 7489 // match: (Zero [4] {t} ptr mem) 7490 // cond: t.Alignment()%2 == 0 7491 // result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)) 7492 for { 7493 if auxIntToInt64(v.AuxInt) != 4 { 7494 break 7495 } 7496 t := auxToType(v.Aux) 7497 ptr := v_0 7498 mem := v_1 7499 if !(t.Alignment()%2 == 0) { 7500 break 7501 } 7502 v.reset(OpLOONG64MOVHstore) 7503 v.AuxInt = int32ToAuxInt(2) 7504 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7505 v0.AuxInt = int64ToAuxInt(0) 7506 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7507 v1.AuxInt = int32ToAuxInt(0) 7508 v1.AddArg3(ptr, v0, mem) 7509 v.AddArg3(ptr, v0, v1) 7510 return true 7511 } 7512 // match: (Zero [4] ptr mem) 7513 // result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))) 7514 for { 7515 if auxIntToInt64(v.AuxInt) != 4 { 7516 break 7517 } 7518 ptr := v_0 7519 mem := v_1 
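// Unaligned 4-byte zero: four chained MOVBstore values at offsets 3, 2, 1 and 0, each threading the previous store's memory result.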
7520 v.reset(OpLOONG64MOVBstore) 7521 v.AuxInt = int32ToAuxInt(3) 7522 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7523 v0.AuxInt = int64ToAuxInt(0) 7524 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7525 v1.AuxInt = int32ToAuxInt(2) 7526 v2 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7527 v2.AuxInt = int32ToAuxInt(1) 7528 v3 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7529 v3.AuxInt = int32ToAuxInt(0) 7530 v3.AddArg3(ptr, v0, mem) 7531 v2.AddArg3(ptr, v0, v3) 7532 v1.AddArg3(ptr, v0, v2) 7533 v.AddArg3(ptr, v0, v1) 7534 return true 7535 } 7536 // match: (Zero [8] {t} ptr mem) 7537 // cond: t.Alignment()%8 == 0 7538 // result: (MOVVstore ptr (MOVVconst [0]) mem) 7539 for { 7540 if auxIntToInt64(v.AuxInt) != 8 { 7541 break 7542 } 7543 t := auxToType(v.Aux) 7544 ptr := v_0 7545 mem := v_1 7546 if !(t.Alignment()%8 == 0) { 7547 break 7548 } 7549 v.reset(OpLOONG64MOVVstore) 7550 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7551 v0.AuxInt = int64ToAuxInt(0) 7552 v.AddArg3(ptr, v0, mem) 7553 return true 7554 } 7555 // match: (Zero [8] {t} ptr mem) 7556 // cond: t.Alignment()%4 == 0 7557 // result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)) 7558 for { 7559 if auxIntToInt64(v.AuxInt) != 8 { 7560 break 7561 } 7562 t := auxToType(v.Aux) 7563 ptr := v_0 7564 mem := v_1 7565 if !(t.Alignment()%4 == 0) { 7566 break 7567 } 7568 v.reset(OpLOONG64MOVWstore) 7569 v.AuxInt = int32ToAuxInt(4) 7570 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7571 v0.AuxInt = int64ToAuxInt(0) 7572 v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 7573 v1.AuxInt = int32ToAuxInt(0) 7574 v1.AddArg3(ptr, v0, mem) 7575 v.AddArg3(ptr, v0, v1) 7576 return true 7577 } 7578 // match: (Zero [8] {t} ptr mem) 7579 // cond: t.Alignment()%2 == 0 7580 // result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))) 7581 for { 7582 if auxIntToInt64(v.AuxInt) != 8 { 7583 break 7584 } 7585 t := auxToType(v.Aux) 7586 ptr := v_0 7587 mem := v_1 7588 if !(t.Alignment()%2 == 0) { 7589 break 7590 } 7591 v.reset(OpLOONG64MOVHstore) 7592 v.AuxInt = int32ToAuxInt(6) 7593 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7594 v0.AuxInt = int64ToAuxInt(0) 7595 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7596 v1.AuxInt = int32ToAuxInt(4) 7597 v2 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7598 v2.AuxInt = int32ToAuxInt(2) 7599 v3 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7600 v3.AuxInt = int32ToAuxInt(0) 7601 v3.AddArg3(ptr, v0, mem) 7602 v2.AddArg3(ptr, v0, v3) 7603 v1.AddArg3(ptr, v0, v2) 7604 v.AddArg3(ptr, v0, v1) 7605 return true 7606 } 7607 // match: (Zero [3] ptr mem) 7608 // result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))) 7609 for { 7610 if auxIntToInt64(v.AuxInt) != 3 { 7611 break 7612 } 7613 ptr := v_0 7614 mem := v_1 7615 v.reset(OpLOONG64MOVBstore) 7616 v.AuxInt = int32ToAuxInt(2) 7617 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7618 v0.AuxInt = int64ToAuxInt(0) 7619 v1 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7620 v1.AuxInt = int32ToAuxInt(1) 7621 v2 := b.NewValue0(v.Pos, OpLOONG64MOVBstore, types.TypeMem) 7622 v2.AuxInt = int32ToAuxInt(0) 7623 v2.AddArg3(ptr, v0, mem) 7624 v1.AddArg3(ptr, v0, v2) 7625 v.AddArg3(ptr, v0, v1) 7626 return true 7627 } 7628 // match: (Zero [6] {t} ptr 
mem) 7629 // cond: t.Alignment()%2 == 0 7630 // result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))) 7631 for { 7632 if auxIntToInt64(v.AuxInt) != 6 { 7633 break 7634 } 7635 t := auxToType(v.Aux) 7636 ptr := v_0 7637 mem := v_1 7638 if !(t.Alignment()%2 == 0) { 7639 break 7640 } 7641 v.reset(OpLOONG64MOVHstore) 7642 v.AuxInt = int32ToAuxInt(4) 7643 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7644 v0.AuxInt = int64ToAuxInt(0) 7645 v1 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7646 v1.AuxInt = int32ToAuxInt(2) 7647 v2 := b.NewValue0(v.Pos, OpLOONG64MOVHstore, types.TypeMem) 7648 v2.AuxInt = int32ToAuxInt(0) 7649 v2.AddArg3(ptr, v0, mem) 7650 v1.AddArg3(ptr, v0, v2) 7651 v.AddArg3(ptr, v0, v1) 7652 return true 7653 } 7654 // match: (Zero [12] {t} ptr mem) 7655 // cond: t.Alignment()%4 == 0 7656 // result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))) 7657 for { 7658 if auxIntToInt64(v.AuxInt) != 12 { 7659 break 7660 } 7661 t := auxToType(v.Aux) 7662 ptr := v_0 7663 mem := v_1 7664 if !(t.Alignment()%4 == 0) { 7665 break 7666 } 7667 v.reset(OpLOONG64MOVWstore) 7668 v.AuxInt = int32ToAuxInt(8) 7669 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7670 v0.AuxInt = int64ToAuxInt(0) 7671 v1 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 7672 v1.AuxInt = int32ToAuxInt(4) 7673 v2 := b.NewValue0(v.Pos, OpLOONG64MOVWstore, types.TypeMem) 7674 v2.AuxInt = int32ToAuxInt(0) 7675 v2.AddArg3(ptr, v0, mem) 7676 v1.AddArg3(ptr, v0, v2) 7677 v.AddArg3(ptr, v0, v1) 7678 return true 7679 } 7680 // match: (Zero [16] {t} ptr mem) 7681 // cond: t.Alignment()%8 == 0 7682 // result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)) 7683 for { 7684 if auxIntToInt64(v.AuxInt) != 16 { 7685 break 7686 } 7687 t := auxToType(v.Aux) 7688 ptr := v_0 7689 mem := v_1 7690 if !(t.Alignment()%8 == 0) { 7691 break 7692 } 7693 v.reset(OpLOONG64MOVVstore) 7694 v.AuxInt = int32ToAuxInt(8) 7695 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7696 v0.AuxInt = int64ToAuxInt(0) 7697 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 7698 v1.AuxInt = int32ToAuxInt(0) 7699 v1.AddArg3(ptr, v0, mem) 7700 v.AddArg3(ptr, v0, v1) 7701 return true 7702 } 7703 // match: (Zero [24] {t} ptr mem) 7704 // cond: t.Alignment()%8 == 0 7705 // result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))) 7706 for { 7707 if auxIntToInt64(v.AuxInt) != 24 { 7708 break 7709 } 7710 t := auxToType(v.Aux) 7711 ptr := v_0 7712 mem := v_1 7713 if !(t.Alignment()%8 == 0) { 7714 break 7715 } 7716 v.reset(OpLOONG64MOVVstore) 7717 v.AuxInt = int32ToAuxInt(16) 7718 v0 := b.NewValue0(v.Pos, OpLOONG64MOVVconst, typ.UInt64) 7719 v0.AuxInt = int64ToAuxInt(0) 7720 v1 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 7721 v1.AuxInt = int32ToAuxInt(8) 7722 v2 := b.NewValue0(v.Pos, OpLOONG64MOVVstore, types.TypeMem) 7723 v2.AuxInt = int32ToAuxInt(0) 7724 v2.AddArg3(ptr, v0, mem) 7725 v1.AddArg3(ptr, v0, v2) 7726 v.AddArg3(ptr, v0, v1) 7727 return true 7728 } 7729 // match: (Zero [s] {t} ptr mem) 7730 // cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice 7731 // result: (DUFFZERO [8 * (128 - s/8)] ptr mem) 7732 for { 7733 s := auxIntToInt64(v.AuxInt) 7734 t := auxToType(v.Aux) 7735 ptr := v_0 7736 mem := v_1 7737 if !(s%8 == 0 && s > 24 && s <= 8*128 && 
t.Alignment()%8 == 0 && !config.noDuffDevice) { 7738 break 7739 } 7740 v.reset(OpLOONG64DUFFZERO) 7741 v.AuxInt = int64ToAuxInt(8 * (128 - s/8)) 7742 v.AddArg2(ptr, mem) 7743 return true 7744 } 7745 // match: (Zero [s] {t} ptr mem) 7746 // cond: (s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0 7747 // result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem) 7748 for { 7749 s := auxIntToInt64(v.AuxInt) 7750 t := auxToType(v.Aux) 7751 ptr := v_0 7752 mem := v_1 7753 if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) { 7754 break 7755 } 7756 v.reset(OpLOONG64LoweredZero) 7757 v.AuxInt = int64ToAuxInt(t.Alignment()) 7758 v0 := b.NewValue0(v.Pos, OpLOONG64ADDVconst, ptr.Type) 7759 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config)) 7760 v0.AddArg(ptr) 7761 v.AddArg3(ptr, v0, mem) 7762 return true 7763 } 7764 return false 7765 } 7766 func rewriteBlockLOONG64(b *Block) bool { 7767 switch b.Kind { 7768 case BlockLOONG64EQ: 7769 // match: (EQ (FPFlagTrue cmp) yes no) 7770 // result: (FPF cmp yes no) 7771 for b.Controls[0].Op == OpLOONG64FPFlagTrue { 7772 v_0 := b.Controls[0] 7773 cmp := v_0.Args[0] 7774 b.resetWithControl(BlockLOONG64FPF, cmp) 7775 return true 7776 } 7777 // match: (EQ (FPFlagFalse cmp) yes no) 7778 // result: (FPT cmp yes no) 7779 for b.Controls[0].Op == OpLOONG64FPFlagFalse { 7780 v_0 := b.Controls[0] 7781 cmp := v_0.Args[0] 7782 b.resetWithControl(BlockLOONG64FPT, cmp) 7783 return true 7784 } 7785 // match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no) 7786 // result: (NE cmp yes no) 7787 for b.Controls[0].Op == OpLOONG64XORconst { 7788 v_0 := b.Controls[0] 7789 if auxIntToInt64(v_0.AuxInt) != 1 { 7790 break 7791 } 7792 cmp := v_0.Args[0] 7793 if cmp.Op != OpLOONG64SGT { 7794 break 7795 } 7796 b.resetWithControl(BlockLOONG64NE, cmp) 7797 return true 7798 } 7799 // match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no) 7800 // result: (NE cmp yes no) 7801 for b.Controls[0].Op == OpLOONG64XORconst { 7802 v_0 := b.Controls[0] 7803 if auxIntToInt64(v_0.AuxInt) != 1 { 7804 break 7805 } 7806 cmp := v_0.Args[0] 7807 if cmp.Op != OpLOONG64SGTU { 7808 break 7809 } 7810 b.resetWithControl(BlockLOONG64NE, cmp) 7811 return true 7812 } 7813 // match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no) 7814 // result: (NE cmp yes no) 7815 for b.Controls[0].Op == OpLOONG64XORconst { 7816 v_0 := b.Controls[0] 7817 if auxIntToInt64(v_0.AuxInt) != 1 { 7818 break 7819 } 7820 cmp := v_0.Args[0] 7821 if cmp.Op != OpLOONG64SGTconst { 7822 break 7823 } 7824 b.resetWithControl(BlockLOONG64NE, cmp) 7825 return true 7826 } 7827 // match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no) 7828 // result: (NE cmp yes no) 7829 for b.Controls[0].Op == OpLOONG64XORconst { 7830 v_0 := b.Controls[0] 7831 if auxIntToInt64(v_0.AuxInt) != 1 { 7832 break 7833 } 7834 cmp := v_0.Args[0] 7835 if cmp.Op != OpLOONG64SGTUconst { 7836 break 7837 } 7838 b.resetWithControl(BlockLOONG64NE, cmp) 7839 return true 7840 } 7841 // match: (EQ (SGTUconst [1] x) yes no) 7842 // result: (NE x yes no) 7843 for b.Controls[0].Op == OpLOONG64SGTUconst { 7844 v_0 := b.Controls[0] 7845 if auxIntToInt64(v_0.AuxInt) != 1 { 7846 break 7847 } 7848 x := v_0.Args[0] 7849 b.resetWithControl(BlockLOONG64NE, x) 7850 return true 7851 } 7852 // match: (EQ (SGTU x (MOVVconst [0])) yes no) 7853 // result: (EQ x yes no) 7854 for b.Controls[0].Op == OpLOONG64SGTU { 7855 v_0 := b.Controls[0] 7856 _ = v_0.Args[1] 7857 x := v_0.Args[0] 7858 v_0_1 := v_0.Args[1] 7859 if v_0_1.Op != 
OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 { 7860 break 7861 } 7862 b.resetWithControl(BlockLOONG64EQ, x) 7863 return true 7864 } 7865 // match: (EQ (SGTconst [0] x) yes no) 7866 // result: (GEZ x yes no) 7867 for b.Controls[0].Op == OpLOONG64SGTconst { 7868 v_0 := b.Controls[0] 7869 if auxIntToInt64(v_0.AuxInt) != 0 { 7870 break 7871 } 7872 x := v_0.Args[0] 7873 b.resetWithControl(BlockLOONG64GEZ, x) 7874 return true 7875 } 7876 // match: (EQ (SGT x (MOVVconst [0])) yes no) 7877 // result: (LEZ x yes no) 7878 for b.Controls[0].Op == OpLOONG64SGT { 7879 v_0 := b.Controls[0] 7880 _ = v_0.Args[1] 7881 x := v_0.Args[0] 7882 v_0_1 := v_0.Args[1] 7883 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 { 7884 break 7885 } 7886 b.resetWithControl(BlockLOONG64LEZ, x) 7887 return true 7888 } 7889 // match: (EQ (MOVVconst [0]) yes no) 7890 // result: (First yes no) 7891 for b.Controls[0].Op == OpLOONG64MOVVconst { 7892 v_0 := b.Controls[0] 7893 if auxIntToInt64(v_0.AuxInt) != 0 { 7894 break 7895 } 7896 b.Reset(BlockFirst) 7897 return true 7898 } 7899 // match: (EQ (MOVVconst [c]) yes no) 7900 // cond: c != 0 7901 // result: (First no yes) 7902 for b.Controls[0].Op == OpLOONG64MOVVconst { 7903 v_0 := b.Controls[0] 7904 c := auxIntToInt64(v_0.AuxInt) 7905 if !(c != 0) { 7906 break 7907 } 7908 b.Reset(BlockFirst) 7909 b.swapSuccessors() 7910 return true 7911 } 7912 case BlockLOONG64GEZ: 7913 // match: (GEZ (MOVVconst [c]) yes no) 7914 // cond: c >= 0 7915 // result: (First yes no) 7916 for b.Controls[0].Op == OpLOONG64MOVVconst { 7917 v_0 := b.Controls[0] 7918 c := auxIntToInt64(v_0.AuxInt) 7919 if !(c >= 0) { 7920 break 7921 } 7922 b.Reset(BlockFirst) 7923 return true 7924 } 7925 // match: (GEZ (MOVVconst [c]) yes no) 7926 // cond: c < 0 7927 // result: (First no yes) 7928 for b.Controls[0].Op == OpLOONG64MOVVconst { 7929 v_0 := b.Controls[0] 7930 c := auxIntToInt64(v_0.AuxInt) 7931 if !(c < 0) { 7932 break 7933 } 7934 b.Reset(BlockFirst) 7935 b.swapSuccessors() 7936 return true 7937 } 7938 case BlockLOONG64GTZ: 7939 // match: (GTZ (MOVVconst [c]) yes no) 7940 // cond: c > 0 7941 // result: (First yes no) 7942 for b.Controls[0].Op == OpLOONG64MOVVconst { 7943 v_0 := b.Controls[0] 7944 c := auxIntToInt64(v_0.AuxInt) 7945 if !(c > 0) { 7946 break 7947 } 7948 b.Reset(BlockFirst) 7949 return true 7950 } 7951 // match: (GTZ (MOVVconst [c]) yes no) 7952 // cond: c <= 0 7953 // result: (First no yes) 7954 for b.Controls[0].Op == OpLOONG64MOVVconst { 7955 v_0 := b.Controls[0] 7956 c := auxIntToInt64(v_0.AuxInt) 7957 if !(c <= 0) { 7958 break 7959 } 7960 b.Reset(BlockFirst) 7961 b.swapSuccessors() 7962 return true 7963 } 7964 case BlockIf: 7965 // match: (If cond yes no) 7966 // result: (NE cond yes no) 7967 for { 7968 cond := b.Controls[0] 7969 b.resetWithControl(BlockLOONG64NE, cond) 7970 return true 7971 } 7972 case BlockLOONG64LEZ: 7973 // match: (LEZ (MOVVconst [c]) yes no) 7974 // cond: c <= 0 7975 // result: (First yes no) 7976 for b.Controls[0].Op == OpLOONG64MOVVconst { 7977 v_0 := b.Controls[0] 7978 c := auxIntToInt64(v_0.AuxInt) 7979 if !(c <= 0) { 7980 break 7981 } 7982 b.Reset(BlockFirst) 7983 return true 7984 } 7985 // match: (LEZ (MOVVconst [c]) yes no) 7986 // cond: c > 0 7987 // result: (First no yes) 7988 for b.Controls[0].Op == OpLOONG64MOVVconst { 7989 v_0 := b.Controls[0] 7990 c := auxIntToInt64(v_0.AuxInt) 7991 if !(c > 0) { 7992 break 7993 } 7994 b.Reset(BlockFirst) 7995 b.swapSuccessors() 7996 return true 7997 } 7998 case BlockLOONG64LTZ: 7999 // match: 
(LTZ (MOVVconst [c]) yes no) 8000 // cond: c < 0 8001 // result: (First yes no) 8002 for b.Controls[0].Op == OpLOONG64MOVVconst { 8003 v_0 := b.Controls[0] 8004 c := auxIntToInt64(v_0.AuxInt) 8005 if !(c < 0) { 8006 break 8007 } 8008 b.Reset(BlockFirst) 8009 return true 8010 } 8011 // match: (LTZ (MOVVconst [c]) yes no) 8012 // cond: c >= 0 8013 // result: (First no yes) 8014 for b.Controls[0].Op == OpLOONG64MOVVconst { 8015 v_0 := b.Controls[0] 8016 c := auxIntToInt64(v_0.AuxInt) 8017 if !(c >= 0) { 8018 break 8019 } 8020 b.Reset(BlockFirst) 8021 b.swapSuccessors() 8022 return true 8023 } 8024 case BlockLOONG64NE: 8025 // match: (NE (FPFlagTrue cmp) yes no) 8026 // result: (FPT cmp yes no) 8027 for b.Controls[0].Op == OpLOONG64FPFlagTrue { 8028 v_0 := b.Controls[0] 8029 cmp := v_0.Args[0] 8030 b.resetWithControl(BlockLOONG64FPT, cmp) 8031 return true 8032 } 8033 // match: (NE (FPFlagFalse cmp) yes no) 8034 // result: (FPF cmp yes no) 8035 for b.Controls[0].Op == OpLOONG64FPFlagFalse { 8036 v_0 := b.Controls[0] 8037 cmp := v_0.Args[0] 8038 b.resetWithControl(BlockLOONG64FPF, cmp) 8039 return true 8040 } 8041 // match: (NE (XORconst [1] cmp:(SGT _ _)) yes no) 8042 // result: (EQ cmp yes no) 8043 for b.Controls[0].Op == OpLOONG64XORconst { 8044 v_0 := b.Controls[0] 8045 if auxIntToInt64(v_0.AuxInt) != 1 { 8046 break 8047 } 8048 cmp := v_0.Args[0] 8049 if cmp.Op != OpLOONG64SGT { 8050 break 8051 } 8052 b.resetWithControl(BlockLOONG64EQ, cmp) 8053 return true 8054 } 8055 // match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no) 8056 // result: (EQ cmp yes no) 8057 for b.Controls[0].Op == OpLOONG64XORconst { 8058 v_0 := b.Controls[0] 8059 if auxIntToInt64(v_0.AuxInt) != 1 { 8060 break 8061 } 8062 cmp := v_0.Args[0] 8063 if cmp.Op != OpLOONG64SGTU { 8064 break 8065 } 8066 b.resetWithControl(BlockLOONG64EQ, cmp) 8067 return true 8068 } 8069 // match: (NE (XORconst [1] cmp:(SGTconst _)) yes no) 8070 // result: (EQ cmp yes no) 8071 for b.Controls[0].Op == OpLOONG64XORconst { 8072 v_0 := b.Controls[0] 8073 if auxIntToInt64(v_0.AuxInt) != 1 { 8074 break 8075 } 8076 cmp := v_0.Args[0] 8077 if cmp.Op != OpLOONG64SGTconst { 8078 break 8079 } 8080 b.resetWithControl(BlockLOONG64EQ, cmp) 8081 return true 8082 } 8083 // match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no) 8084 // result: (EQ cmp yes no) 8085 for b.Controls[0].Op == OpLOONG64XORconst { 8086 v_0 := b.Controls[0] 8087 if auxIntToInt64(v_0.AuxInt) != 1 { 8088 break 8089 } 8090 cmp := v_0.Args[0] 8091 if cmp.Op != OpLOONG64SGTUconst { 8092 break 8093 } 8094 b.resetWithControl(BlockLOONG64EQ, cmp) 8095 return true 8096 } 8097 // match: (NE (SGTUconst [1] x) yes no) 8098 // result: (EQ x yes no) 8099 for b.Controls[0].Op == OpLOONG64SGTUconst { 8100 v_0 := b.Controls[0] 8101 if auxIntToInt64(v_0.AuxInt) != 1 { 8102 break 8103 } 8104 x := v_0.Args[0] 8105 b.resetWithControl(BlockLOONG64EQ, x) 8106 return true 8107 } 8108 // match: (NE (SGTU x (MOVVconst [0])) yes no) 8109 // result: (NE x yes no) 8110 for b.Controls[0].Op == OpLOONG64SGTU { 8111 v_0 := b.Controls[0] 8112 _ = v_0.Args[1] 8113 x := v_0.Args[0] 8114 v_0_1 := v_0.Args[1] 8115 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 { 8116 break 8117 } 8118 b.resetWithControl(BlockLOONG64NE, x) 8119 return true 8120 } 8121 // match: (NE (SGTconst [0] x) yes no) 8122 // result: (LTZ x yes no) 8123 for b.Controls[0].Op == OpLOONG64SGTconst { 8124 v_0 := b.Controls[0] 8125 if auxIntToInt64(v_0.AuxInt) != 0 { 8126 break 8127 } 8128 x := v_0.Args[0] 8129 
b.resetWithControl(BlockLOONG64LTZ, x) 8130 return true 8131 } 8132 // match: (NE (SGT x (MOVVconst [0])) yes no) 8133 // result: (GTZ x yes no) 8134 for b.Controls[0].Op == OpLOONG64SGT { 8135 v_0 := b.Controls[0] 8136 _ = v_0.Args[1] 8137 x := v_0.Args[0] 8138 v_0_1 := v_0.Args[1] 8139 if v_0_1.Op != OpLOONG64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 { 8140 break 8141 } 8142 b.resetWithControl(BlockLOONG64GTZ, x) 8143 return true 8144 } 8145 // match: (NE (MOVVconst [0]) yes no) 8146 // result: (First no yes) 8147 for b.Controls[0].Op == OpLOONG64MOVVconst { 8148 v_0 := b.Controls[0] 8149 if auxIntToInt64(v_0.AuxInt) != 0 { 8150 break 8151 } 8152 b.Reset(BlockFirst) 8153 b.swapSuccessors() 8154 return true 8155 } 8156 // match: (NE (MOVVconst [c]) yes no) 8157 // cond: c != 0 8158 // result: (First yes no) 8159 for b.Controls[0].Op == OpLOONG64MOVVconst { 8160 v_0 := b.Controls[0] 8161 c := auxIntToInt64(v_0.AuxInt) 8162 if !(c != 0) { 8163 break 8164 } 8165 b.Reset(BlockFirst) 8166 return true 8167 } 8168 } 8169 return false 8170 }