github.com/sbinet/go@v0.0.0-20160827155028-54d7de7dd62b/src/cmd/compile/internal/ssa/rewritegeneric.go (about) 1 // autogenerated from gen/generic.rules: do not edit! 2 // generated with: cd gen; go run *.go 3 4 package ssa 5 6 import "math" 7 8 var _ = math.MinInt8 // in case not otherwise used 9 func rewriteValuegeneric(v *Value, config *Config) bool { 10 switch v.Op { 11 case OpAdd16: 12 return rewriteValuegeneric_OpAdd16(v, config) 13 case OpAdd32: 14 return rewriteValuegeneric_OpAdd32(v, config) 15 case OpAdd32F: 16 return rewriteValuegeneric_OpAdd32F(v, config) 17 case OpAdd64: 18 return rewriteValuegeneric_OpAdd64(v, config) 19 case OpAdd64F: 20 return rewriteValuegeneric_OpAdd64F(v, config) 21 case OpAdd8: 22 return rewriteValuegeneric_OpAdd8(v, config) 23 case OpAddPtr: 24 return rewriteValuegeneric_OpAddPtr(v, config) 25 case OpAnd16: 26 return rewriteValuegeneric_OpAnd16(v, config) 27 case OpAnd32: 28 return rewriteValuegeneric_OpAnd32(v, config) 29 case OpAnd64: 30 return rewriteValuegeneric_OpAnd64(v, config) 31 case OpAnd8: 32 return rewriteValuegeneric_OpAnd8(v, config) 33 case OpArg: 34 return rewriteValuegeneric_OpArg(v, config) 35 case OpArrayIndex: 36 return rewriteValuegeneric_OpArrayIndex(v, config) 37 case OpCom16: 38 return rewriteValuegeneric_OpCom16(v, config) 39 case OpCom32: 40 return rewriteValuegeneric_OpCom32(v, config) 41 case OpCom64: 42 return rewriteValuegeneric_OpCom64(v, config) 43 case OpCom8: 44 return rewriteValuegeneric_OpCom8(v, config) 45 case OpConstInterface: 46 return rewriteValuegeneric_OpConstInterface(v, config) 47 case OpConstSlice: 48 return rewriteValuegeneric_OpConstSlice(v, config) 49 case OpConstString: 50 return rewriteValuegeneric_OpConstString(v, config) 51 case OpConvert: 52 return rewriteValuegeneric_OpConvert(v, config) 53 case OpCvt32Fto64F: 54 return rewriteValuegeneric_OpCvt32Fto64F(v, config) 55 case OpCvt64Fto32F: 56 return rewriteValuegeneric_OpCvt64Fto32F(v, config) 57 case OpDiv32F: 58 return 
rewriteValuegeneric_OpDiv32F(v, config) 59 case OpDiv64: 60 return rewriteValuegeneric_OpDiv64(v, config) 61 case OpDiv64F: 62 return rewriteValuegeneric_OpDiv64F(v, config) 63 case OpDiv64u: 64 return rewriteValuegeneric_OpDiv64u(v, config) 65 case OpEq16: 66 return rewriteValuegeneric_OpEq16(v, config) 67 case OpEq32: 68 return rewriteValuegeneric_OpEq32(v, config) 69 case OpEq64: 70 return rewriteValuegeneric_OpEq64(v, config) 71 case OpEq8: 72 return rewriteValuegeneric_OpEq8(v, config) 73 case OpEqB: 74 return rewriteValuegeneric_OpEqB(v, config) 75 case OpEqInter: 76 return rewriteValuegeneric_OpEqInter(v, config) 77 case OpEqPtr: 78 return rewriteValuegeneric_OpEqPtr(v, config) 79 case OpEqSlice: 80 return rewriteValuegeneric_OpEqSlice(v, config) 81 case OpGeq16: 82 return rewriteValuegeneric_OpGeq16(v, config) 83 case OpGeq16U: 84 return rewriteValuegeneric_OpGeq16U(v, config) 85 case OpGeq32: 86 return rewriteValuegeneric_OpGeq32(v, config) 87 case OpGeq32U: 88 return rewriteValuegeneric_OpGeq32U(v, config) 89 case OpGeq64: 90 return rewriteValuegeneric_OpGeq64(v, config) 91 case OpGeq64U: 92 return rewriteValuegeneric_OpGeq64U(v, config) 93 case OpGeq8: 94 return rewriteValuegeneric_OpGeq8(v, config) 95 case OpGeq8U: 96 return rewriteValuegeneric_OpGeq8U(v, config) 97 case OpGreater16: 98 return rewriteValuegeneric_OpGreater16(v, config) 99 case OpGreater16U: 100 return rewriteValuegeneric_OpGreater16U(v, config) 101 case OpGreater32: 102 return rewriteValuegeneric_OpGreater32(v, config) 103 case OpGreater32U: 104 return rewriteValuegeneric_OpGreater32U(v, config) 105 case OpGreater64: 106 return rewriteValuegeneric_OpGreater64(v, config) 107 case OpGreater64U: 108 return rewriteValuegeneric_OpGreater64U(v, config) 109 case OpGreater8: 110 return rewriteValuegeneric_OpGreater8(v, config) 111 case OpGreater8U: 112 return rewriteValuegeneric_OpGreater8U(v, config) 113 case OpIsInBounds: 114 return rewriteValuegeneric_OpIsInBounds(v, config) 115 case 
OpIsSliceInBounds: 116 return rewriteValuegeneric_OpIsSliceInBounds(v, config) 117 case OpLeq16: 118 return rewriteValuegeneric_OpLeq16(v, config) 119 case OpLeq16U: 120 return rewriteValuegeneric_OpLeq16U(v, config) 121 case OpLeq32: 122 return rewriteValuegeneric_OpLeq32(v, config) 123 case OpLeq32U: 124 return rewriteValuegeneric_OpLeq32U(v, config) 125 case OpLeq64: 126 return rewriteValuegeneric_OpLeq64(v, config) 127 case OpLeq64U: 128 return rewriteValuegeneric_OpLeq64U(v, config) 129 case OpLeq8: 130 return rewriteValuegeneric_OpLeq8(v, config) 131 case OpLeq8U: 132 return rewriteValuegeneric_OpLeq8U(v, config) 133 case OpLess16: 134 return rewriteValuegeneric_OpLess16(v, config) 135 case OpLess16U: 136 return rewriteValuegeneric_OpLess16U(v, config) 137 case OpLess32: 138 return rewriteValuegeneric_OpLess32(v, config) 139 case OpLess32U: 140 return rewriteValuegeneric_OpLess32U(v, config) 141 case OpLess64: 142 return rewriteValuegeneric_OpLess64(v, config) 143 case OpLess64U: 144 return rewriteValuegeneric_OpLess64U(v, config) 145 case OpLess8: 146 return rewriteValuegeneric_OpLess8(v, config) 147 case OpLess8U: 148 return rewriteValuegeneric_OpLess8U(v, config) 149 case OpLoad: 150 return rewriteValuegeneric_OpLoad(v, config) 151 case OpLsh16x16: 152 return rewriteValuegeneric_OpLsh16x16(v, config) 153 case OpLsh16x32: 154 return rewriteValuegeneric_OpLsh16x32(v, config) 155 case OpLsh16x64: 156 return rewriteValuegeneric_OpLsh16x64(v, config) 157 case OpLsh16x8: 158 return rewriteValuegeneric_OpLsh16x8(v, config) 159 case OpLsh32x16: 160 return rewriteValuegeneric_OpLsh32x16(v, config) 161 case OpLsh32x32: 162 return rewriteValuegeneric_OpLsh32x32(v, config) 163 case OpLsh32x64: 164 return rewriteValuegeneric_OpLsh32x64(v, config) 165 case OpLsh32x8: 166 return rewriteValuegeneric_OpLsh32x8(v, config) 167 case OpLsh64x16: 168 return rewriteValuegeneric_OpLsh64x16(v, config) 169 case OpLsh64x32: 170 return rewriteValuegeneric_OpLsh64x32(v, config) 171 
case OpLsh64x64: 172 return rewriteValuegeneric_OpLsh64x64(v, config) 173 case OpLsh64x8: 174 return rewriteValuegeneric_OpLsh64x8(v, config) 175 case OpLsh8x16: 176 return rewriteValuegeneric_OpLsh8x16(v, config) 177 case OpLsh8x32: 178 return rewriteValuegeneric_OpLsh8x32(v, config) 179 case OpLsh8x64: 180 return rewriteValuegeneric_OpLsh8x64(v, config) 181 case OpLsh8x8: 182 return rewriteValuegeneric_OpLsh8x8(v, config) 183 case OpMod16: 184 return rewriteValuegeneric_OpMod16(v, config) 185 case OpMod16u: 186 return rewriteValuegeneric_OpMod16u(v, config) 187 case OpMod32: 188 return rewriteValuegeneric_OpMod32(v, config) 189 case OpMod32u: 190 return rewriteValuegeneric_OpMod32u(v, config) 191 case OpMod64: 192 return rewriteValuegeneric_OpMod64(v, config) 193 case OpMod64u: 194 return rewriteValuegeneric_OpMod64u(v, config) 195 case OpMod8: 196 return rewriteValuegeneric_OpMod8(v, config) 197 case OpMod8u: 198 return rewriteValuegeneric_OpMod8u(v, config) 199 case OpMul16: 200 return rewriteValuegeneric_OpMul16(v, config) 201 case OpMul32: 202 return rewriteValuegeneric_OpMul32(v, config) 203 case OpMul32F: 204 return rewriteValuegeneric_OpMul32F(v, config) 205 case OpMul64: 206 return rewriteValuegeneric_OpMul64(v, config) 207 case OpMul64F: 208 return rewriteValuegeneric_OpMul64F(v, config) 209 case OpMul8: 210 return rewriteValuegeneric_OpMul8(v, config) 211 case OpNeg16: 212 return rewriteValuegeneric_OpNeg16(v, config) 213 case OpNeg32: 214 return rewriteValuegeneric_OpNeg32(v, config) 215 case OpNeg64: 216 return rewriteValuegeneric_OpNeg64(v, config) 217 case OpNeg8: 218 return rewriteValuegeneric_OpNeg8(v, config) 219 case OpNeq16: 220 return rewriteValuegeneric_OpNeq16(v, config) 221 case OpNeq32: 222 return rewriteValuegeneric_OpNeq32(v, config) 223 case OpNeq64: 224 return rewriteValuegeneric_OpNeq64(v, config) 225 case OpNeq8: 226 return rewriteValuegeneric_OpNeq8(v, config) 227 case OpNeqB: 228 return rewriteValuegeneric_OpNeqB(v, config) 229 
case OpNeqInter: 230 return rewriteValuegeneric_OpNeqInter(v, config) 231 case OpNeqPtr: 232 return rewriteValuegeneric_OpNeqPtr(v, config) 233 case OpNeqSlice: 234 return rewriteValuegeneric_OpNeqSlice(v, config) 235 case OpOffPtr: 236 return rewriteValuegeneric_OpOffPtr(v, config) 237 case OpOr16: 238 return rewriteValuegeneric_OpOr16(v, config) 239 case OpOr32: 240 return rewriteValuegeneric_OpOr32(v, config) 241 case OpOr64: 242 return rewriteValuegeneric_OpOr64(v, config) 243 case OpOr8: 244 return rewriteValuegeneric_OpOr8(v, config) 245 case OpPhi: 246 return rewriteValuegeneric_OpPhi(v, config) 247 case OpPtrIndex: 248 return rewriteValuegeneric_OpPtrIndex(v, config) 249 case OpRsh16Ux16: 250 return rewriteValuegeneric_OpRsh16Ux16(v, config) 251 case OpRsh16Ux32: 252 return rewriteValuegeneric_OpRsh16Ux32(v, config) 253 case OpRsh16Ux64: 254 return rewriteValuegeneric_OpRsh16Ux64(v, config) 255 case OpRsh16Ux8: 256 return rewriteValuegeneric_OpRsh16Ux8(v, config) 257 case OpRsh16x16: 258 return rewriteValuegeneric_OpRsh16x16(v, config) 259 case OpRsh16x32: 260 return rewriteValuegeneric_OpRsh16x32(v, config) 261 case OpRsh16x64: 262 return rewriteValuegeneric_OpRsh16x64(v, config) 263 case OpRsh16x8: 264 return rewriteValuegeneric_OpRsh16x8(v, config) 265 case OpRsh32Ux16: 266 return rewriteValuegeneric_OpRsh32Ux16(v, config) 267 case OpRsh32Ux32: 268 return rewriteValuegeneric_OpRsh32Ux32(v, config) 269 case OpRsh32Ux64: 270 return rewriteValuegeneric_OpRsh32Ux64(v, config) 271 case OpRsh32Ux8: 272 return rewriteValuegeneric_OpRsh32Ux8(v, config) 273 case OpRsh32x16: 274 return rewriteValuegeneric_OpRsh32x16(v, config) 275 case OpRsh32x32: 276 return rewriteValuegeneric_OpRsh32x32(v, config) 277 case OpRsh32x64: 278 return rewriteValuegeneric_OpRsh32x64(v, config) 279 case OpRsh32x8: 280 return rewriteValuegeneric_OpRsh32x8(v, config) 281 case OpRsh64Ux16: 282 return rewriteValuegeneric_OpRsh64Ux16(v, config) 283 case OpRsh64Ux32: 284 return 
rewriteValuegeneric_OpRsh64Ux32(v, config) 285 case OpRsh64Ux64: 286 return rewriteValuegeneric_OpRsh64Ux64(v, config) 287 case OpRsh64Ux8: 288 return rewriteValuegeneric_OpRsh64Ux8(v, config) 289 case OpRsh64x16: 290 return rewriteValuegeneric_OpRsh64x16(v, config) 291 case OpRsh64x32: 292 return rewriteValuegeneric_OpRsh64x32(v, config) 293 case OpRsh64x64: 294 return rewriteValuegeneric_OpRsh64x64(v, config) 295 case OpRsh64x8: 296 return rewriteValuegeneric_OpRsh64x8(v, config) 297 case OpRsh8Ux16: 298 return rewriteValuegeneric_OpRsh8Ux16(v, config) 299 case OpRsh8Ux32: 300 return rewriteValuegeneric_OpRsh8Ux32(v, config) 301 case OpRsh8Ux64: 302 return rewriteValuegeneric_OpRsh8Ux64(v, config) 303 case OpRsh8Ux8: 304 return rewriteValuegeneric_OpRsh8Ux8(v, config) 305 case OpRsh8x16: 306 return rewriteValuegeneric_OpRsh8x16(v, config) 307 case OpRsh8x32: 308 return rewriteValuegeneric_OpRsh8x32(v, config) 309 case OpRsh8x64: 310 return rewriteValuegeneric_OpRsh8x64(v, config) 311 case OpRsh8x8: 312 return rewriteValuegeneric_OpRsh8x8(v, config) 313 case OpSliceCap: 314 return rewriteValuegeneric_OpSliceCap(v, config) 315 case OpSliceLen: 316 return rewriteValuegeneric_OpSliceLen(v, config) 317 case OpSlicePtr: 318 return rewriteValuegeneric_OpSlicePtr(v, config) 319 case OpSqrt: 320 return rewriteValuegeneric_OpSqrt(v, config) 321 case OpStore: 322 return rewriteValuegeneric_OpStore(v, config) 323 case OpStringLen: 324 return rewriteValuegeneric_OpStringLen(v, config) 325 case OpStringPtr: 326 return rewriteValuegeneric_OpStringPtr(v, config) 327 case OpStructSelect: 328 return rewriteValuegeneric_OpStructSelect(v, config) 329 case OpSub16: 330 return rewriteValuegeneric_OpSub16(v, config) 331 case OpSub32: 332 return rewriteValuegeneric_OpSub32(v, config) 333 case OpSub32F: 334 return rewriteValuegeneric_OpSub32F(v, config) 335 case OpSub64: 336 return rewriteValuegeneric_OpSub64(v, config) 337 case OpSub64F: 338 return rewriteValuegeneric_OpSub64F(v, 
config) 339 case OpSub8: 340 return rewriteValuegeneric_OpSub8(v, config) 341 case OpTrunc16to8: 342 return rewriteValuegeneric_OpTrunc16to8(v, config) 343 case OpTrunc32to16: 344 return rewriteValuegeneric_OpTrunc32to16(v, config) 345 case OpTrunc32to8: 346 return rewriteValuegeneric_OpTrunc32to8(v, config) 347 case OpTrunc64to16: 348 return rewriteValuegeneric_OpTrunc64to16(v, config) 349 case OpTrunc64to32: 350 return rewriteValuegeneric_OpTrunc64to32(v, config) 351 case OpTrunc64to8: 352 return rewriteValuegeneric_OpTrunc64to8(v, config) 353 case OpXor16: 354 return rewriteValuegeneric_OpXor16(v, config) 355 case OpXor32: 356 return rewriteValuegeneric_OpXor32(v, config) 357 case OpXor64: 358 return rewriteValuegeneric_OpXor64(v, config) 359 case OpXor8: 360 return rewriteValuegeneric_OpXor8(v, config) 361 } 362 return false 363 } 364 func rewriteValuegeneric_OpAdd16(v *Value, config *Config) bool { 365 b := v.Block 366 _ = b 367 // match: (Add16 (Const16 [c]) (Const16 [d])) 368 // cond: 369 // result: (Const16 [int64(int16(c+d))]) 370 for { 371 v_0 := v.Args[0] 372 if v_0.Op != OpConst16 { 373 break 374 } 375 c := v_0.AuxInt 376 v_1 := v.Args[1] 377 if v_1.Op != OpConst16 { 378 break 379 } 380 d := v_1.AuxInt 381 v.reset(OpConst16) 382 v.AuxInt = int64(int16(c + d)) 383 return true 384 } 385 // match: (Add16 x (Const16 <t> [c])) 386 // cond: x.Op != OpConst16 387 // result: (Add16 (Const16 <t> [c]) x) 388 for { 389 x := v.Args[0] 390 v_1 := v.Args[1] 391 if v_1.Op != OpConst16 { 392 break 393 } 394 t := v_1.Type 395 c := v_1.AuxInt 396 if !(x.Op != OpConst16) { 397 break 398 } 399 v.reset(OpAdd16) 400 v0 := b.NewValue0(v.Line, OpConst16, t) 401 v0.AuxInt = c 402 v.AddArg(v0) 403 v.AddArg(x) 404 return true 405 } 406 // match: (Add16 (Const16 [0]) x) 407 // cond: 408 // result: x 409 for { 410 v_0 := v.Args[0] 411 if v_0.Op != OpConst16 { 412 break 413 } 414 if v_0.AuxInt != 0 { 415 break 416 } 417 x := v.Args[1] 418 v.reset(OpCopy) 419 v.Type = x.Type 420 
v.AddArg(x) 421 return true 422 } 423 return false 424 } 425 func rewriteValuegeneric_OpAdd32(v *Value, config *Config) bool { 426 b := v.Block 427 _ = b 428 // match: (Add32 (Const32 [c]) (Const32 [d])) 429 // cond: 430 // result: (Const32 [int64(int32(c+d))]) 431 for { 432 v_0 := v.Args[0] 433 if v_0.Op != OpConst32 { 434 break 435 } 436 c := v_0.AuxInt 437 v_1 := v.Args[1] 438 if v_1.Op != OpConst32 { 439 break 440 } 441 d := v_1.AuxInt 442 v.reset(OpConst32) 443 v.AuxInt = int64(int32(c + d)) 444 return true 445 } 446 // match: (Add32 x (Const32 <t> [c])) 447 // cond: x.Op != OpConst32 448 // result: (Add32 (Const32 <t> [c]) x) 449 for { 450 x := v.Args[0] 451 v_1 := v.Args[1] 452 if v_1.Op != OpConst32 { 453 break 454 } 455 t := v_1.Type 456 c := v_1.AuxInt 457 if !(x.Op != OpConst32) { 458 break 459 } 460 v.reset(OpAdd32) 461 v0 := b.NewValue0(v.Line, OpConst32, t) 462 v0.AuxInt = c 463 v.AddArg(v0) 464 v.AddArg(x) 465 return true 466 } 467 // match: (Add32 (Const32 [0]) x) 468 // cond: 469 // result: x 470 for { 471 v_0 := v.Args[0] 472 if v_0.Op != OpConst32 { 473 break 474 } 475 if v_0.AuxInt != 0 { 476 break 477 } 478 x := v.Args[1] 479 v.reset(OpCopy) 480 v.Type = x.Type 481 v.AddArg(x) 482 return true 483 } 484 return false 485 } 486 func rewriteValuegeneric_OpAdd32F(v *Value, config *Config) bool { 487 b := v.Block 488 _ = b 489 // match: (Add32F (Const32F [c]) (Const32F [d])) 490 // cond: 491 // result: (Const32F [f2i(float64(i2f32(c) + i2f32(d)))]) 492 for { 493 v_0 := v.Args[0] 494 if v_0.Op != OpConst32F { 495 break 496 } 497 c := v_0.AuxInt 498 v_1 := v.Args[1] 499 if v_1.Op != OpConst32F { 500 break 501 } 502 d := v_1.AuxInt 503 v.reset(OpConst32F) 504 v.AuxInt = f2i(float64(i2f32(c) + i2f32(d))) 505 return true 506 } 507 // match: (Add32F x (Const32F [0])) 508 // cond: 509 // result: x 510 for { 511 x := v.Args[0] 512 v_1 := v.Args[1] 513 if v_1.Op != OpConst32F { 514 break 515 } 516 if v_1.AuxInt != 0 { 517 break 518 } 519 v.reset(OpCopy) 520 
v.Type = x.Type 521 v.AddArg(x) 522 return true 523 } 524 // match: (Add32F (Const32F [0]) x) 525 // cond: 526 // result: x 527 for { 528 v_0 := v.Args[0] 529 if v_0.Op != OpConst32F { 530 break 531 } 532 if v_0.AuxInt != 0 { 533 break 534 } 535 x := v.Args[1] 536 v.reset(OpCopy) 537 v.Type = x.Type 538 v.AddArg(x) 539 return true 540 } 541 return false 542 } 543 func rewriteValuegeneric_OpAdd64(v *Value, config *Config) bool { 544 b := v.Block 545 _ = b 546 // match: (Add64 (Const64 [c]) (Const64 [d])) 547 // cond: 548 // result: (Const64 [c+d]) 549 for { 550 v_0 := v.Args[0] 551 if v_0.Op != OpConst64 { 552 break 553 } 554 c := v_0.AuxInt 555 v_1 := v.Args[1] 556 if v_1.Op != OpConst64 { 557 break 558 } 559 d := v_1.AuxInt 560 v.reset(OpConst64) 561 v.AuxInt = c + d 562 return true 563 } 564 // match: (Add64 x (Const64 <t> [c])) 565 // cond: x.Op != OpConst64 566 // result: (Add64 (Const64 <t> [c]) x) 567 for { 568 x := v.Args[0] 569 v_1 := v.Args[1] 570 if v_1.Op != OpConst64 { 571 break 572 } 573 t := v_1.Type 574 c := v_1.AuxInt 575 if !(x.Op != OpConst64) { 576 break 577 } 578 v.reset(OpAdd64) 579 v0 := b.NewValue0(v.Line, OpConst64, t) 580 v0.AuxInt = c 581 v.AddArg(v0) 582 v.AddArg(x) 583 return true 584 } 585 // match: (Add64 (Const64 [0]) x) 586 // cond: 587 // result: x 588 for { 589 v_0 := v.Args[0] 590 if v_0.Op != OpConst64 { 591 break 592 } 593 if v_0.AuxInt != 0 { 594 break 595 } 596 x := v.Args[1] 597 v.reset(OpCopy) 598 v.Type = x.Type 599 v.AddArg(x) 600 return true 601 } 602 return false 603 } 604 func rewriteValuegeneric_OpAdd64F(v *Value, config *Config) bool { 605 b := v.Block 606 _ = b 607 // match: (Add64F (Const64F [c]) (Const64F [d])) 608 // cond: 609 // result: (Const64F [f2i(i2f(c) + i2f(d))]) 610 for { 611 v_0 := v.Args[0] 612 if v_0.Op != OpConst64F { 613 break 614 } 615 c := v_0.AuxInt 616 v_1 := v.Args[1] 617 if v_1.Op != OpConst64F { 618 break 619 } 620 d := v_1.AuxInt 621 v.reset(OpConst64F) 622 v.AuxInt = f2i(i2f(c) + i2f(d)) 623 
return true 624 } 625 // match: (Add64F x (Const64F [0])) 626 // cond: 627 // result: x 628 for { 629 x := v.Args[0] 630 v_1 := v.Args[1] 631 if v_1.Op != OpConst64F { 632 break 633 } 634 if v_1.AuxInt != 0 { 635 break 636 } 637 v.reset(OpCopy) 638 v.Type = x.Type 639 v.AddArg(x) 640 return true 641 } 642 // match: (Add64F (Const64F [0]) x) 643 // cond: 644 // result: x 645 for { 646 v_0 := v.Args[0] 647 if v_0.Op != OpConst64F { 648 break 649 } 650 if v_0.AuxInt != 0 { 651 break 652 } 653 x := v.Args[1] 654 v.reset(OpCopy) 655 v.Type = x.Type 656 v.AddArg(x) 657 return true 658 } 659 return false 660 } 661 func rewriteValuegeneric_OpAdd8(v *Value, config *Config) bool { 662 b := v.Block 663 _ = b 664 // match: (Add8 (Const8 [c]) (Const8 [d])) 665 // cond: 666 // result: (Const8 [int64(int8(c+d))]) 667 for { 668 v_0 := v.Args[0] 669 if v_0.Op != OpConst8 { 670 break 671 } 672 c := v_0.AuxInt 673 v_1 := v.Args[1] 674 if v_1.Op != OpConst8 { 675 break 676 } 677 d := v_1.AuxInt 678 v.reset(OpConst8) 679 v.AuxInt = int64(int8(c + d)) 680 return true 681 } 682 // match: (Add8 x (Const8 <t> [c])) 683 // cond: x.Op != OpConst8 684 // result: (Add8 (Const8 <t> [c]) x) 685 for { 686 x := v.Args[0] 687 v_1 := v.Args[1] 688 if v_1.Op != OpConst8 { 689 break 690 } 691 t := v_1.Type 692 c := v_1.AuxInt 693 if !(x.Op != OpConst8) { 694 break 695 } 696 v.reset(OpAdd8) 697 v0 := b.NewValue0(v.Line, OpConst8, t) 698 v0.AuxInt = c 699 v.AddArg(v0) 700 v.AddArg(x) 701 return true 702 } 703 // match: (Add8 (Const8 [0]) x) 704 // cond: 705 // result: x 706 for { 707 v_0 := v.Args[0] 708 if v_0.Op != OpConst8 { 709 break 710 } 711 if v_0.AuxInt != 0 { 712 break 713 } 714 x := v.Args[1] 715 v.reset(OpCopy) 716 v.Type = x.Type 717 v.AddArg(x) 718 return true 719 } 720 return false 721 } 722 func rewriteValuegeneric_OpAddPtr(v *Value, config *Config) bool { 723 b := v.Block 724 _ = b 725 // match: (AddPtr <t> x (Const64 [c])) 726 // cond: 727 // result: (OffPtr <t> x [c]) 728 for { 729 t 
:= v.Type 730 x := v.Args[0] 731 v_1 := v.Args[1] 732 if v_1.Op != OpConst64 { 733 break 734 } 735 c := v_1.AuxInt 736 v.reset(OpOffPtr) 737 v.Type = t 738 v.AuxInt = c 739 v.AddArg(x) 740 return true 741 } 742 return false 743 } 744 func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool { 745 b := v.Block 746 _ = b 747 // match: (And16 x (Const16 <t> [c])) 748 // cond: x.Op != OpConst16 749 // result: (And16 (Const16 <t> [c]) x) 750 for { 751 x := v.Args[0] 752 v_1 := v.Args[1] 753 if v_1.Op != OpConst16 { 754 break 755 } 756 t := v_1.Type 757 c := v_1.AuxInt 758 if !(x.Op != OpConst16) { 759 break 760 } 761 v.reset(OpAnd16) 762 v0 := b.NewValue0(v.Line, OpConst16, t) 763 v0.AuxInt = c 764 v.AddArg(v0) 765 v.AddArg(x) 766 return true 767 } 768 // match: (And16 x x) 769 // cond: 770 // result: x 771 for { 772 x := v.Args[0] 773 if x != v.Args[1] { 774 break 775 } 776 v.reset(OpCopy) 777 v.Type = x.Type 778 v.AddArg(x) 779 return true 780 } 781 // match: (And16 (Const16 [-1]) x) 782 // cond: 783 // result: x 784 for { 785 v_0 := v.Args[0] 786 if v_0.Op != OpConst16 { 787 break 788 } 789 if v_0.AuxInt != -1 { 790 break 791 } 792 x := v.Args[1] 793 v.reset(OpCopy) 794 v.Type = x.Type 795 v.AddArg(x) 796 return true 797 } 798 // match: (And16 (Const16 [0]) _) 799 // cond: 800 // result: (Const16 [0]) 801 for { 802 v_0 := v.Args[0] 803 if v_0.Op != OpConst16 { 804 break 805 } 806 if v_0.AuxInt != 0 { 807 break 808 } 809 v.reset(OpConst16) 810 v.AuxInt = 0 811 return true 812 } 813 // match: (And16 x (And16 x y)) 814 // cond: 815 // result: (And16 x y) 816 for { 817 x := v.Args[0] 818 v_1 := v.Args[1] 819 if v_1.Op != OpAnd16 { 820 break 821 } 822 if x != v_1.Args[0] { 823 break 824 } 825 y := v_1.Args[1] 826 v.reset(OpAnd16) 827 v.AddArg(x) 828 v.AddArg(y) 829 return true 830 } 831 // match: (And16 x (And16 y x)) 832 // cond: 833 // result: (And16 x y) 834 for { 835 x := v.Args[0] 836 v_1 := v.Args[1] 837 if v_1.Op != OpAnd16 { 838 break 839 } 840 y := 
v_1.Args[0] 841 if x != v_1.Args[1] { 842 break 843 } 844 v.reset(OpAnd16) 845 v.AddArg(x) 846 v.AddArg(y) 847 return true 848 } 849 // match: (And16 (And16 x y) x) 850 // cond: 851 // result: (And16 x y) 852 for { 853 v_0 := v.Args[0] 854 if v_0.Op != OpAnd16 { 855 break 856 } 857 x := v_0.Args[0] 858 y := v_0.Args[1] 859 if x != v.Args[1] { 860 break 861 } 862 v.reset(OpAnd16) 863 v.AddArg(x) 864 v.AddArg(y) 865 return true 866 } 867 // match: (And16 (And16 x y) y) 868 // cond: 869 // result: (And16 x y) 870 for { 871 v_0 := v.Args[0] 872 if v_0.Op != OpAnd16 { 873 break 874 } 875 x := v_0.Args[0] 876 y := v_0.Args[1] 877 if y != v.Args[1] { 878 break 879 } 880 v.reset(OpAnd16) 881 v.AddArg(x) 882 v.AddArg(y) 883 return true 884 } 885 return false 886 } 887 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool { 888 b := v.Block 889 _ = b 890 // match: (And32 x (Const32 <t> [c])) 891 // cond: x.Op != OpConst32 892 // result: (And32 (Const32 <t> [c]) x) 893 for { 894 x := v.Args[0] 895 v_1 := v.Args[1] 896 if v_1.Op != OpConst32 { 897 break 898 } 899 t := v_1.Type 900 c := v_1.AuxInt 901 if !(x.Op != OpConst32) { 902 break 903 } 904 v.reset(OpAnd32) 905 v0 := b.NewValue0(v.Line, OpConst32, t) 906 v0.AuxInt = c 907 v.AddArg(v0) 908 v.AddArg(x) 909 return true 910 } 911 // match: (And32 x x) 912 // cond: 913 // result: x 914 for { 915 x := v.Args[0] 916 if x != v.Args[1] { 917 break 918 } 919 v.reset(OpCopy) 920 v.Type = x.Type 921 v.AddArg(x) 922 return true 923 } 924 // match: (And32 (Const32 [-1]) x) 925 // cond: 926 // result: x 927 for { 928 v_0 := v.Args[0] 929 if v_0.Op != OpConst32 { 930 break 931 } 932 if v_0.AuxInt != -1 { 933 break 934 } 935 x := v.Args[1] 936 v.reset(OpCopy) 937 v.Type = x.Type 938 v.AddArg(x) 939 return true 940 } 941 // match: (And32 (Const32 [0]) _) 942 // cond: 943 // result: (Const32 [0]) 944 for { 945 v_0 := v.Args[0] 946 if v_0.Op != OpConst32 { 947 break 948 } 949 if v_0.AuxInt != 0 { 950 break 951 } 952 
v.reset(OpConst32) 953 v.AuxInt = 0 954 return true 955 } 956 // match: (And32 x (And32 x y)) 957 // cond: 958 // result: (And32 x y) 959 for { 960 x := v.Args[0] 961 v_1 := v.Args[1] 962 if v_1.Op != OpAnd32 { 963 break 964 } 965 if x != v_1.Args[0] { 966 break 967 } 968 y := v_1.Args[1] 969 v.reset(OpAnd32) 970 v.AddArg(x) 971 v.AddArg(y) 972 return true 973 } 974 // match: (And32 x (And32 y x)) 975 // cond: 976 // result: (And32 x y) 977 for { 978 x := v.Args[0] 979 v_1 := v.Args[1] 980 if v_1.Op != OpAnd32 { 981 break 982 } 983 y := v_1.Args[0] 984 if x != v_1.Args[1] { 985 break 986 } 987 v.reset(OpAnd32) 988 v.AddArg(x) 989 v.AddArg(y) 990 return true 991 } 992 // match: (And32 (And32 x y) x) 993 // cond: 994 // result: (And32 x y) 995 for { 996 v_0 := v.Args[0] 997 if v_0.Op != OpAnd32 { 998 break 999 } 1000 x := v_0.Args[0] 1001 y := v_0.Args[1] 1002 if x != v.Args[1] { 1003 break 1004 } 1005 v.reset(OpAnd32) 1006 v.AddArg(x) 1007 v.AddArg(y) 1008 return true 1009 } 1010 // match: (And32 (And32 x y) y) 1011 // cond: 1012 // result: (And32 x y) 1013 for { 1014 v_0 := v.Args[0] 1015 if v_0.Op != OpAnd32 { 1016 break 1017 } 1018 x := v_0.Args[0] 1019 y := v_0.Args[1] 1020 if y != v.Args[1] { 1021 break 1022 } 1023 v.reset(OpAnd32) 1024 v.AddArg(x) 1025 v.AddArg(y) 1026 return true 1027 } 1028 return false 1029 } 1030 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool { 1031 b := v.Block 1032 _ = b 1033 // match: (And64 x (Const64 <t> [c])) 1034 // cond: x.Op != OpConst64 1035 // result: (And64 (Const64 <t> [c]) x) 1036 for { 1037 x := v.Args[0] 1038 v_1 := v.Args[1] 1039 if v_1.Op != OpConst64 { 1040 break 1041 } 1042 t := v_1.Type 1043 c := v_1.AuxInt 1044 if !(x.Op != OpConst64) { 1045 break 1046 } 1047 v.reset(OpAnd64) 1048 v0 := b.NewValue0(v.Line, OpConst64, t) 1049 v0.AuxInt = c 1050 v.AddArg(v0) 1051 v.AddArg(x) 1052 return true 1053 } 1054 // match: (And64 x x) 1055 // cond: 1056 // result: x 1057 for { 1058 x := v.Args[0] 1059 if x != 
v.Args[1] { 1060 break 1061 } 1062 v.reset(OpCopy) 1063 v.Type = x.Type 1064 v.AddArg(x) 1065 return true 1066 } 1067 // match: (And64 (Const64 [-1]) x) 1068 // cond: 1069 // result: x 1070 for { 1071 v_0 := v.Args[0] 1072 if v_0.Op != OpConst64 { 1073 break 1074 } 1075 if v_0.AuxInt != -1 { 1076 break 1077 } 1078 x := v.Args[1] 1079 v.reset(OpCopy) 1080 v.Type = x.Type 1081 v.AddArg(x) 1082 return true 1083 } 1084 // match: (And64 (Const64 [0]) _) 1085 // cond: 1086 // result: (Const64 [0]) 1087 for { 1088 v_0 := v.Args[0] 1089 if v_0.Op != OpConst64 { 1090 break 1091 } 1092 if v_0.AuxInt != 0 { 1093 break 1094 } 1095 v.reset(OpConst64) 1096 v.AuxInt = 0 1097 return true 1098 } 1099 // match: (And64 x (And64 x y)) 1100 // cond: 1101 // result: (And64 x y) 1102 for { 1103 x := v.Args[0] 1104 v_1 := v.Args[1] 1105 if v_1.Op != OpAnd64 { 1106 break 1107 } 1108 if x != v_1.Args[0] { 1109 break 1110 } 1111 y := v_1.Args[1] 1112 v.reset(OpAnd64) 1113 v.AddArg(x) 1114 v.AddArg(y) 1115 return true 1116 } 1117 // match: (And64 x (And64 y x)) 1118 // cond: 1119 // result: (And64 x y) 1120 for { 1121 x := v.Args[0] 1122 v_1 := v.Args[1] 1123 if v_1.Op != OpAnd64 { 1124 break 1125 } 1126 y := v_1.Args[0] 1127 if x != v_1.Args[1] { 1128 break 1129 } 1130 v.reset(OpAnd64) 1131 v.AddArg(x) 1132 v.AddArg(y) 1133 return true 1134 } 1135 // match: (And64 (And64 x y) x) 1136 // cond: 1137 // result: (And64 x y) 1138 for { 1139 v_0 := v.Args[0] 1140 if v_0.Op != OpAnd64 { 1141 break 1142 } 1143 x := v_0.Args[0] 1144 y := v_0.Args[1] 1145 if x != v.Args[1] { 1146 break 1147 } 1148 v.reset(OpAnd64) 1149 v.AddArg(x) 1150 v.AddArg(y) 1151 return true 1152 } 1153 // match: (And64 (And64 x y) y) 1154 // cond: 1155 // result: (And64 x y) 1156 for { 1157 v_0 := v.Args[0] 1158 if v_0.Op != OpAnd64 { 1159 break 1160 } 1161 x := v_0.Args[0] 1162 y := v_0.Args[1] 1163 if y != v.Args[1] { 1164 break 1165 } 1166 v.reset(OpAnd64) 1167 v.AddArg(x) 1168 v.AddArg(y) 1169 return true 1170 } 1171 // 
match: (And64 <t> (Const64 [y]) x) 1172 // cond: nlz(y) + nto(y) == 64 && nto(y) >= 32 1173 // result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)])) 1174 for { 1175 t := v.Type 1176 v_0 := v.Args[0] 1177 if v_0.Op != OpConst64 { 1178 break 1179 } 1180 y := v_0.AuxInt 1181 x := v.Args[1] 1182 if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) { 1183 break 1184 } 1185 v.reset(OpRsh64Ux64) 1186 v0 := b.NewValue0(v.Line, OpLsh64x64, t) 1187 v0.AddArg(x) 1188 v1 := b.NewValue0(v.Line, OpConst64, t) 1189 v1.AuxInt = nlz(y) 1190 v0.AddArg(v1) 1191 v.AddArg(v0) 1192 v2 := b.NewValue0(v.Line, OpConst64, t) 1193 v2.AuxInt = nlz(y) 1194 v.AddArg(v2) 1195 return true 1196 } 1197 // match: (And64 <t> (Const64 [y]) x) 1198 // cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32 1199 // result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)])) 1200 for { 1201 t := v.Type 1202 v_0 := v.Args[0] 1203 if v_0.Op != OpConst64 { 1204 break 1205 } 1206 y := v_0.AuxInt 1207 x := v.Args[1] 1208 if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) { 1209 break 1210 } 1211 v.reset(OpLsh64x64) 1212 v0 := b.NewValue0(v.Line, OpRsh64Ux64, t) 1213 v0.AddArg(x) 1214 v1 := b.NewValue0(v.Line, OpConst64, t) 1215 v1.AuxInt = ntz(y) 1216 v0.AddArg(v1) 1217 v.AddArg(v0) 1218 v2 := b.NewValue0(v.Line, OpConst64, t) 1219 v2.AuxInt = ntz(y) 1220 v.AddArg(v2) 1221 return true 1222 } 1223 return false 1224 } 1225 func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool { 1226 b := v.Block 1227 _ = b 1228 // match: (And8 x (Const8 <t> [c])) 1229 // cond: x.Op != OpConst8 1230 // result: (And8 (Const8 <t> [c]) x) 1231 for { 1232 x := v.Args[0] 1233 v_1 := v.Args[1] 1234 if v_1.Op != OpConst8 { 1235 break 1236 } 1237 t := v_1.Type 1238 c := v_1.AuxInt 1239 if !(x.Op != OpConst8) { 1240 break 1241 } 1242 v.reset(OpAnd8) 1243 v0 := b.NewValue0(v.Line, OpConst8, t) 1244 v0.AuxInt = c 1245 v.AddArg(v0) 1246 v.AddArg(x) 1247 return true 1248 } 1249 // match: (And8 x x) 1250 // cond: 
1251 // result: x 1252 for { 1253 x := v.Args[0] 1254 if x != v.Args[1] { 1255 break 1256 } 1257 v.reset(OpCopy) 1258 v.Type = x.Type 1259 v.AddArg(x) 1260 return true 1261 } 1262 // match: (And8 (Const8 [-1]) x) 1263 // cond: 1264 // result: x 1265 for { 1266 v_0 := v.Args[0] 1267 if v_0.Op != OpConst8 { 1268 break 1269 } 1270 if v_0.AuxInt != -1 { 1271 break 1272 } 1273 x := v.Args[1] 1274 v.reset(OpCopy) 1275 v.Type = x.Type 1276 v.AddArg(x) 1277 return true 1278 } 1279 // match: (And8 (Const8 [0]) _) 1280 // cond: 1281 // result: (Const8 [0]) 1282 for { 1283 v_0 := v.Args[0] 1284 if v_0.Op != OpConst8 { 1285 break 1286 } 1287 if v_0.AuxInt != 0 { 1288 break 1289 } 1290 v.reset(OpConst8) 1291 v.AuxInt = 0 1292 return true 1293 } 1294 // match: (And8 x (And8 x y)) 1295 // cond: 1296 // result: (And8 x y) 1297 for { 1298 x := v.Args[0] 1299 v_1 := v.Args[1] 1300 if v_1.Op != OpAnd8 { 1301 break 1302 } 1303 if x != v_1.Args[0] { 1304 break 1305 } 1306 y := v_1.Args[1] 1307 v.reset(OpAnd8) 1308 v.AddArg(x) 1309 v.AddArg(y) 1310 return true 1311 } 1312 // match: (And8 x (And8 y x)) 1313 // cond: 1314 // result: (And8 x y) 1315 for { 1316 x := v.Args[0] 1317 v_1 := v.Args[1] 1318 if v_1.Op != OpAnd8 { 1319 break 1320 } 1321 y := v_1.Args[0] 1322 if x != v_1.Args[1] { 1323 break 1324 } 1325 v.reset(OpAnd8) 1326 v.AddArg(x) 1327 v.AddArg(y) 1328 return true 1329 } 1330 // match: (And8 (And8 x y) x) 1331 // cond: 1332 // result: (And8 x y) 1333 for { 1334 v_0 := v.Args[0] 1335 if v_0.Op != OpAnd8 { 1336 break 1337 } 1338 x := v_0.Args[0] 1339 y := v_0.Args[1] 1340 if x != v.Args[1] { 1341 break 1342 } 1343 v.reset(OpAnd8) 1344 v.AddArg(x) 1345 v.AddArg(y) 1346 return true 1347 } 1348 // match: (And8 (And8 x y) y) 1349 // cond: 1350 // result: (And8 x y) 1351 for { 1352 v_0 := v.Args[0] 1353 if v_0.Op != OpAnd8 { 1354 break 1355 } 1356 x := v_0.Args[0] 1357 y := v_0.Args[1] 1358 if y != v.Args[1] { 1359 break 1360 } 1361 v.reset(OpAnd8) 1362 v.AddArg(x) 1363 v.AddArg(y) 
1364 return true 1365 } 1366 return false 1367 } 1368 func rewriteValuegeneric_OpArg(v *Value, config *Config) bool { 1369 b := v.Block 1370 _ = b 1371 // match: (Arg {n} [off]) 1372 // cond: v.Type.IsString() 1373 // result: (StringMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize])) 1374 for { 1375 off := v.AuxInt 1376 n := v.Aux 1377 if !(v.Type.IsString()) { 1378 break 1379 } 1380 v.reset(OpStringMake) 1381 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1382 v0.AuxInt = off 1383 v0.Aux = n 1384 v.AddArg(v0) 1385 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1386 v1.AuxInt = off + config.PtrSize 1387 v1.Aux = n 1388 v.AddArg(v1) 1389 return true 1390 } 1391 // match: (Arg {n} [off]) 1392 // cond: v.Type.IsSlice() 1393 // result: (SliceMake (Arg <v.Type.ElemType().PtrTo()> {n} [off]) (Arg <config.fe.TypeInt()> {n} [off+config.PtrSize]) (Arg <config.fe.TypeInt()> {n} [off+2*config.PtrSize])) 1394 for { 1395 off := v.AuxInt 1396 n := v.Aux 1397 if !(v.Type.IsSlice()) { 1398 break 1399 } 1400 v.reset(OpSliceMake) 1401 v0 := b.NewValue0(v.Line, OpArg, v.Type.ElemType().PtrTo()) 1402 v0.AuxInt = off 1403 v0.Aux = n 1404 v.AddArg(v0) 1405 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1406 v1.AuxInt = off + config.PtrSize 1407 v1.Aux = n 1408 v.AddArg(v1) 1409 v2 := b.NewValue0(v.Line, OpArg, config.fe.TypeInt()) 1410 v2.AuxInt = off + 2*config.PtrSize 1411 v2.Aux = n 1412 v.AddArg(v2) 1413 return true 1414 } 1415 // match: (Arg {n} [off]) 1416 // cond: v.Type.IsInterface() 1417 // result: (IMake (Arg <config.fe.TypeBytePtr()> {n} [off]) (Arg <config.fe.TypeBytePtr()> {n} [off+config.PtrSize])) 1418 for { 1419 off := v.AuxInt 1420 n := v.Aux 1421 if !(v.Type.IsInterface()) { 1422 break 1423 } 1424 v.reset(OpIMake) 1425 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1426 v0.AuxInt = off 1427 v0.Aux = n 1428 v.AddArg(v0) 1429 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeBytePtr()) 1430 
v1.AuxInt = off + config.PtrSize 1431 v1.Aux = n 1432 v.AddArg(v1) 1433 return true 1434 } 1435 // match: (Arg {n} [off]) 1436 // cond: v.Type.IsComplex() && v.Type.Size() == 16 1437 // result: (ComplexMake (Arg <config.fe.TypeFloat64()> {n} [off]) (Arg <config.fe.TypeFloat64()> {n} [off+8])) 1438 for { 1439 off := v.AuxInt 1440 n := v.Aux 1441 if !(v.Type.IsComplex() && v.Type.Size() == 16) { 1442 break 1443 } 1444 v.reset(OpComplexMake) 1445 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1446 v0.AuxInt = off 1447 v0.Aux = n 1448 v.AddArg(v0) 1449 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat64()) 1450 v1.AuxInt = off + 8 1451 v1.Aux = n 1452 v.AddArg(v1) 1453 return true 1454 } 1455 // match: (Arg {n} [off]) 1456 // cond: v.Type.IsComplex() && v.Type.Size() == 8 1457 // result: (ComplexMake (Arg <config.fe.TypeFloat32()> {n} [off]) (Arg <config.fe.TypeFloat32()> {n} [off+4])) 1458 for { 1459 off := v.AuxInt 1460 n := v.Aux 1461 if !(v.Type.IsComplex() && v.Type.Size() == 8) { 1462 break 1463 } 1464 v.reset(OpComplexMake) 1465 v0 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1466 v0.AuxInt = off 1467 v0.Aux = n 1468 v.AddArg(v0) 1469 v1 := b.NewValue0(v.Line, OpArg, config.fe.TypeFloat32()) 1470 v1.AuxInt = off + 4 1471 v1.Aux = n 1472 v.AddArg(v1) 1473 return true 1474 } 1475 // match: (Arg <t>) 1476 // cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t) 1477 // result: (StructMake0) 1478 for { 1479 t := v.Type 1480 if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) { 1481 break 1482 } 1483 v.reset(OpStructMake0) 1484 return true 1485 } 1486 // match: (Arg <t> {n} [off]) 1487 // cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t) 1488 // result: (StructMake1 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)])) 1489 for { 1490 t := v.Type 1491 off := v.AuxInt 1492 n := v.Aux 1493 if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) { 1494 break 1495 } 1496 v.reset(OpStructMake1) 1497 v0 := 
b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1498 v0.AuxInt = off + t.FieldOff(0) 1499 v0.Aux = n 1500 v.AddArg(v0) 1501 return true 1502 } 1503 // match: (Arg <t> {n} [off]) 1504 // cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t) 1505 // result: (StructMake2 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)])) 1506 for { 1507 t := v.Type 1508 off := v.AuxInt 1509 n := v.Aux 1510 if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) { 1511 break 1512 } 1513 v.reset(OpStructMake2) 1514 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1515 v0.AuxInt = off + t.FieldOff(0) 1516 v0.Aux = n 1517 v.AddArg(v0) 1518 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1519 v1.AuxInt = off + t.FieldOff(1) 1520 v1.Aux = n 1521 v.AddArg(v1) 1522 return true 1523 } 1524 // match: (Arg <t> {n} [off]) 1525 // cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t) 1526 // result: (StructMake3 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)])) 1527 for { 1528 t := v.Type 1529 off := v.AuxInt 1530 n := v.Aux 1531 if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) { 1532 break 1533 } 1534 v.reset(OpStructMake3) 1535 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1536 v0.AuxInt = off + t.FieldOff(0) 1537 v0.Aux = n 1538 v.AddArg(v0) 1539 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1540 v1.AuxInt = off + t.FieldOff(1) 1541 v1.Aux = n 1542 v.AddArg(v1) 1543 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1544 v2.AuxInt = off + t.FieldOff(2) 1545 v2.Aux = n 1546 v.AddArg(v2) 1547 return true 1548 } 1549 // match: (Arg <t> {n} [off]) 1550 // cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t) 1551 // result: (StructMake4 (Arg <t.FieldType(0)> {n} [off+t.FieldOff(0)]) (Arg <t.FieldType(1)> {n} [off+t.FieldOff(1)]) (Arg <t.FieldType(2)> {n} [off+t.FieldOff(2)]) (Arg <t.FieldType(3)> {n} 
[off+t.FieldOff(3)])) 1552 for { 1553 t := v.Type 1554 off := v.AuxInt 1555 n := v.Aux 1556 if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) { 1557 break 1558 } 1559 v.reset(OpStructMake4) 1560 v0 := b.NewValue0(v.Line, OpArg, t.FieldType(0)) 1561 v0.AuxInt = off + t.FieldOff(0) 1562 v0.Aux = n 1563 v.AddArg(v0) 1564 v1 := b.NewValue0(v.Line, OpArg, t.FieldType(1)) 1565 v1.AuxInt = off + t.FieldOff(1) 1566 v1.Aux = n 1567 v.AddArg(v1) 1568 v2 := b.NewValue0(v.Line, OpArg, t.FieldType(2)) 1569 v2.AuxInt = off + t.FieldOff(2) 1570 v2.Aux = n 1571 v.AddArg(v2) 1572 v3 := b.NewValue0(v.Line, OpArg, t.FieldType(3)) 1573 v3.AuxInt = off + t.FieldOff(3) 1574 v3.Aux = n 1575 v.AddArg(v3) 1576 return true 1577 } 1578 return false 1579 } 1580 func rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool { 1581 b := v.Block 1582 _ = b 1583 // match: (ArrayIndex <t> [0] x:(Load ptr mem)) 1584 // cond: 1585 // result: @x.Block (Load <t> ptr mem) 1586 for { 1587 t := v.Type 1588 if v.AuxInt != 0 { 1589 break 1590 } 1591 x := v.Args[0] 1592 if x.Op != OpLoad { 1593 break 1594 } 1595 ptr := x.Args[0] 1596 mem := x.Args[1] 1597 b = x.Block 1598 v0 := b.NewValue0(v.Line, OpLoad, t) 1599 v.reset(OpCopy) 1600 v.AddArg(v0) 1601 v0.AddArg(ptr) 1602 v0.AddArg(mem) 1603 return true 1604 } 1605 return false 1606 } 1607 func rewriteValuegeneric_OpCom16(v *Value, config *Config) bool { 1608 b := v.Block 1609 _ = b 1610 // match: (Com16 (Com16 x)) 1611 // cond: 1612 // result: x 1613 for { 1614 v_0 := v.Args[0] 1615 if v_0.Op != OpCom16 { 1616 break 1617 } 1618 x := v_0.Args[0] 1619 v.reset(OpCopy) 1620 v.Type = x.Type 1621 v.AddArg(x) 1622 return true 1623 } 1624 return false 1625 } 1626 func rewriteValuegeneric_OpCom32(v *Value, config *Config) bool { 1627 b := v.Block 1628 _ = b 1629 // match: (Com32 (Com32 x)) 1630 // cond: 1631 // result: x 1632 for { 1633 v_0 := v.Args[0] 1634 if v_0.Op != OpCom32 { 1635 break 1636 } 1637 x := v_0.Args[0] 1638 v.reset(OpCopy) 1639 
v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom64 rewrites (Com64 (Com64 x)) to x:
// two bitwise complements cancel, so the value is replaced by a
// copy of the inner operand.
func rewriteValuegeneric_OpCom64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com64 (Com64 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpCom8 rewrites (Com8 (Com8 x)) to x:
// the 8-bit analogue of the Com64 rule above.
func rewriteValuegeneric_OpCom8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Com8 (Com8 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpConstInterface expands a ConstInterface
// value into an IMake of two nil pointer words (itab and data).
// This rule always fires, so the function always returns true.
func rewriteValuegeneric_OpConstInterface(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstInterface)
	// cond:
	// result: (IMake (ConstNil <config.fe.TypeBytePtr()>) (ConstNil <config.fe.TypeBytePtr()>))
	for {
		v.reset(OpIMake)
		v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr())
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpConstSlice expands a ConstSlice value into
// a SliceMake of a nil data pointer and zero len/cap constants,
// using Const32 or Const64 words to match the target pointer size.
func rewriteValuegeneric_OpConstSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (ConstSlice)
	// cond: config.PtrSize == 4
	// result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const32 <config.fe.TypeInt()> [0]) (Const32 <config.fe.TypeInt()> [0]))
	for {
		if !(config.PtrSize == 4) {
			break
		}
		v.reset(OpSliceMake)
		v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo())
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v1.AuxInt = 0
		v.AddArg(v1)
		v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt())
		v2.AuxInt = 0
		v.AddArg(v2)
1717 return true 1718 } 1719 // match: (ConstSlice) 1720 // cond: config.PtrSize == 8 1721 // result: (SliceMake (ConstNil <v.Type.ElemType().PtrTo()>) (Const64 <config.fe.TypeInt()> [0]) (Const64 <config.fe.TypeInt()> [0])) 1722 for { 1723 if !(config.PtrSize == 8) { 1724 break 1725 } 1726 v.reset(OpSliceMake) 1727 v0 := b.NewValue0(v.Line, OpConstNil, v.Type.ElemType().PtrTo()) 1728 v.AddArg(v0) 1729 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1730 v1.AuxInt = 0 1731 v.AddArg(v1) 1732 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1733 v2.AuxInt = 0 1734 v.AddArg(v2) 1735 return true 1736 } 1737 return false 1738 } 1739 func rewriteValuegeneric_OpConstString(v *Value, config *Config) bool { 1740 b := v.Block 1741 _ = b 1742 // match: (ConstString {s}) 1743 // cond: config.PtrSize == 4 && s.(string) == "" 1744 // result: (StringMake (ConstNil) (Const32 <config.fe.TypeInt()> [0])) 1745 for { 1746 s := v.Aux 1747 if !(config.PtrSize == 4 && s.(string) == "") { 1748 break 1749 } 1750 v.reset(OpStringMake) 1751 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1752 v.AddArg(v0) 1753 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1754 v1.AuxInt = 0 1755 v.AddArg(v1) 1756 return true 1757 } 1758 // match: (ConstString {s}) 1759 // cond: config.PtrSize == 8 && s.(string) == "" 1760 // result: (StringMake (ConstNil) (Const64 <config.fe.TypeInt()> [0])) 1761 for { 1762 s := v.Aux 1763 if !(config.PtrSize == 8 && s.(string) == "") { 1764 break 1765 } 1766 v.reset(OpStringMake) 1767 v0 := b.NewValue0(v.Line, OpConstNil, config.fe.TypeBytePtr()) 1768 v.AddArg(v0) 1769 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1770 v1.AuxInt = 0 1771 v.AddArg(v1) 1772 return true 1773 } 1774 // match: (ConstString {s}) 1775 // cond: config.PtrSize == 4 && s.(string) != "" 1776 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const32 <config.fe.TypeInt()> [int64(len(s.(string)))])) 
1777 for { 1778 s := v.Aux 1779 if !(config.PtrSize == 4 && s.(string) != "") { 1780 break 1781 } 1782 v.reset(OpStringMake) 1783 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1784 v0.Aux = config.fe.StringData(s.(string)) 1785 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1786 v0.AddArg(v1) 1787 v.AddArg(v0) 1788 v2 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 1789 v2.AuxInt = int64(len(s.(string))) 1790 v.AddArg(v2) 1791 return true 1792 } 1793 // match: (ConstString {s}) 1794 // cond: config.PtrSize == 8 && s.(string) != "" 1795 // result: (StringMake (Addr <config.fe.TypeBytePtr()> {config.fe.StringData(s.(string))} (SB)) (Const64 <config.fe.TypeInt()> [int64(len(s.(string)))])) 1796 for { 1797 s := v.Aux 1798 if !(config.PtrSize == 8 && s.(string) != "") { 1799 break 1800 } 1801 v.reset(OpStringMake) 1802 v0 := b.NewValue0(v.Line, OpAddr, config.fe.TypeBytePtr()) 1803 v0.Aux = config.fe.StringData(s.(string)) 1804 v1 := b.NewValue0(v.Line, OpSB, config.fe.TypeUintptr()) 1805 v0.AddArg(v1) 1806 v.AddArg(v0) 1807 v2 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 1808 v2.AuxInt = int64(len(s.(string))) 1809 v.AddArg(v2) 1810 return true 1811 } 1812 return false 1813 } 1814 func rewriteValuegeneric_OpConvert(v *Value, config *Config) bool { 1815 b := v.Block 1816 _ = b 1817 // match: (Convert (Add64 (Convert ptr mem) off) mem) 1818 // cond: 1819 // result: (Add64 ptr off) 1820 for { 1821 v_0 := v.Args[0] 1822 if v_0.Op != OpAdd64 { 1823 break 1824 } 1825 v_0_0 := v_0.Args[0] 1826 if v_0_0.Op != OpConvert { 1827 break 1828 } 1829 ptr := v_0_0.Args[0] 1830 mem := v_0_0.Args[1] 1831 off := v_0.Args[1] 1832 if mem != v.Args[1] { 1833 break 1834 } 1835 v.reset(OpAdd64) 1836 v.AddArg(ptr) 1837 v.AddArg(off) 1838 return true 1839 } 1840 // match: (Convert (Add64 off (Convert ptr mem)) mem) 1841 // cond: 1842 // result: (Add64 ptr off) 1843 for { 1844 v_0 := v.Args[0] 1845 if v_0.Op != OpAdd64 { 1846 break 1847 } 1848 off := 
v_0.Args[0] 1849 v_0_1 := v_0.Args[1] 1850 if v_0_1.Op != OpConvert { 1851 break 1852 } 1853 ptr := v_0_1.Args[0] 1854 mem := v_0_1.Args[1] 1855 if mem != v.Args[1] { 1856 break 1857 } 1858 v.reset(OpAdd64) 1859 v.AddArg(ptr) 1860 v.AddArg(off) 1861 return true 1862 } 1863 // match: (Convert (Convert ptr mem) mem) 1864 // cond: 1865 // result: ptr 1866 for { 1867 v_0 := v.Args[0] 1868 if v_0.Op != OpConvert { 1869 break 1870 } 1871 ptr := v_0.Args[0] 1872 mem := v_0.Args[1] 1873 if mem != v.Args[1] { 1874 break 1875 } 1876 v.reset(OpCopy) 1877 v.Type = ptr.Type 1878 v.AddArg(ptr) 1879 return true 1880 } 1881 return false 1882 } 1883 func rewriteValuegeneric_OpCvt32Fto64F(v *Value, config *Config) bool { 1884 b := v.Block 1885 _ = b 1886 // match: (Cvt32Fto64F (Const32F [c])) 1887 // cond: 1888 // result: (Const64F [c]) 1889 for { 1890 v_0 := v.Args[0] 1891 if v_0.Op != OpConst32F { 1892 break 1893 } 1894 c := v_0.AuxInt 1895 v.reset(OpConst64F) 1896 v.AuxInt = c 1897 return true 1898 } 1899 return false 1900 } 1901 func rewriteValuegeneric_OpCvt64Fto32F(v *Value, config *Config) bool { 1902 b := v.Block 1903 _ = b 1904 // match: (Cvt64Fto32F (Const64F [c])) 1905 // cond: 1906 // result: (Const32F [f2i(float64(i2f32(c)))]) 1907 for { 1908 v_0 := v.Args[0] 1909 if v_0.Op != OpConst64F { 1910 break 1911 } 1912 c := v_0.AuxInt 1913 v.reset(OpConst32F) 1914 v.AuxInt = f2i(float64(i2f32(c))) 1915 return true 1916 } 1917 return false 1918 } 1919 func rewriteValuegeneric_OpDiv32F(v *Value, config *Config) bool { 1920 b := v.Block 1921 _ = b 1922 // match: (Div32F x (Const32F [f2i(1)])) 1923 // cond: 1924 // result: x 1925 for { 1926 x := v.Args[0] 1927 v_1 := v.Args[1] 1928 if v_1.Op != OpConst32F { 1929 break 1930 } 1931 if v_1.AuxInt != f2i(1) { 1932 break 1933 } 1934 v.reset(OpCopy) 1935 v.Type = x.Type 1936 v.AddArg(x) 1937 return true 1938 } 1939 // match: (Div32F x (Const32F [f2i(-1)])) 1940 // cond: 1941 // result: (Neg32F x) 1942 for { 1943 x := v.Args[0] 1944 v_1 
:= v.Args[1] 1945 if v_1.Op != OpConst32F { 1946 break 1947 } 1948 if v_1.AuxInt != f2i(-1) { 1949 break 1950 } 1951 v.reset(OpNeg32F) 1952 v.AddArg(x) 1953 return true 1954 } 1955 return false 1956 } 1957 func rewriteValuegeneric_OpDiv64(v *Value, config *Config) bool { 1958 b := v.Block 1959 _ = b 1960 // match: (Div64 <t> x (Const64 [c])) 1961 // cond: c > 0 && smagic64ok(c) && smagic64m(c) > 0 1962 // result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) 1963 for { 1964 t := v.Type 1965 x := v.Args[0] 1966 v_1 := v.Args[1] 1967 if v_1.Op != OpConst64 { 1968 break 1969 } 1970 c := v_1.AuxInt 1971 if !(c > 0 && smagic64ok(c) && smagic64m(c) > 0) { 1972 break 1973 } 1974 v.reset(OpSub64) 1975 v.Type = t 1976 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 1977 v1 := b.NewValue0(v.Line, OpHmul64, t) 1978 v2 := b.NewValue0(v.Line, OpConst64, t) 1979 v2.AuxInt = smagic64m(c) 1980 v1.AddArg(v2) 1981 v1.AddArg(x) 1982 v0.AddArg(v1) 1983 v3 := b.NewValue0(v.Line, OpConst64, t) 1984 v3.AuxInt = smagic64s(c) 1985 v0.AddArg(v3) 1986 v.AddArg(v0) 1987 v4 := b.NewValue0(v.Line, OpRsh64x64, t) 1988 v4.AddArg(x) 1989 v5 := b.NewValue0(v.Line, OpConst64, t) 1990 v5.AuxInt = 63 1991 v4.AddArg(v5) 1992 v.AddArg(v4) 1993 return true 1994 } 1995 // match: (Div64 <t> x (Const64 [c])) 1996 // cond: c > 0 && smagic64ok(c) && smagic64m(c) < 0 1997 // result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63]))) 1998 for { 1999 t := v.Type 2000 x := v.Args[0] 2001 v_1 := v.Args[1] 2002 if v_1.Op != OpConst64 { 2003 break 2004 } 2005 c := v_1.AuxInt 2006 if !(c > 0 && smagic64ok(c) && smagic64m(c) < 0) { 2007 break 2008 } 2009 v.reset(OpSub64) 2010 v.Type = t 2011 v0 := b.NewValue0(v.Line, OpRsh64x64, t) 2012 v1 := b.NewValue0(v.Line, OpAdd64, t) 2013 v2 := b.NewValue0(v.Line, OpHmul64, t) 2014 v3 := 
b.NewValue0(v.Line, OpConst64, t) 2015 v3.AuxInt = smagic64m(c) 2016 v2.AddArg(v3) 2017 v2.AddArg(x) 2018 v1.AddArg(v2) 2019 v1.AddArg(x) 2020 v0.AddArg(v1) 2021 v4 := b.NewValue0(v.Line, OpConst64, t) 2022 v4.AuxInt = smagic64s(c) 2023 v0.AddArg(v4) 2024 v.AddArg(v0) 2025 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2026 v5.AddArg(x) 2027 v6 := b.NewValue0(v.Line, OpConst64, t) 2028 v6.AuxInt = 63 2029 v5.AddArg(v6) 2030 v.AddArg(v5) 2031 return true 2032 } 2033 // match: (Div64 <t> x (Const64 [c])) 2034 // cond: c < 0 && smagic64ok(c) && smagic64m(c) > 0 2035 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) 2036 for { 2037 t := v.Type 2038 x := v.Args[0] 2039 v_1 := v.Args[1] 2040 if v_1.Op != OpConst64 { 2041 break 2042 } 2043 c := v_1.AuxInt 2044 if !(c < 0 && smagic64ok(c) && smagic64m(c) > 0) { 2045 break 2046 } 2047 v.reset(OpNeg64) 2048 v.Type = t 2049 v0 := b.NewValue0(v.Line, OpSub64, t) 2050 v1 := b.NewValue0(v.Line, OpRsh64x64, t) 2051 v2 := b.NewValue0(v.Line, OpHmul64, t) 2052 v3 := b.NewValue0(v.Line, OpConst64, t) 2053 v3.AuxInt = smagic64m(c) 2054 v2.AddArg(v3) 2055 v2.AddArg(x) 2056 v1.AddArg(v2) 2057 v4 := b.NewValue0(v.Line, OpConst64, t) 2058 v4.AuxInt = smagic64s(c) 2059 v1.AddArg(v4) 2060 v0.AddArg(v1) 2061 v5 := b.NewValue0(v.Line, OpRsh64x64, t) 2062 v5.AddArg(x) 2063 v6 := b.NewValue0(v.Line, OpConst64, t) 2064 v6.AuxInt = 63 2065 v5.AddArg(v6) 2066 v0.AddArg(v5) 2067 v.AddArg(v0) 2068 return true 2069 } 2070 // match: (Div64 <t> x (Const64 [c])) 2071 // cond: c < 0 && smagic64ok(c) && smagic64m(c) < 0 2072 // result: (Neg64 <t> (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <t> [smagic64m(c)]) x) x) (Const64 <t> [smagic64s(c)])) (Rsh64x64 <t> x (Const64 <t> [63])))) 2073 for { 2074 t := v.Type 2075 x := v.Args[0] 2076 v_1 := v.Args[1] 2077 if v_1.Op != OpConst64 { 2078 break 2079 } 2080 c := v_1.AuxInt 2081 if !(c < 0 && 
smagic64ok(c) && smagic64m(c) < 0) {
			break
		}
		// Signed magic-number division, c < 0 and negative magic
		// multiplier: negate the c > 0 expansion.
		v.reset(OpNeg64)
		v.Type = t
		v0 := b.NewValue0(v.Line, OpSub64, t)
		v1 := b.NewValue0(v.Line, OpRsh64x64, t)
		v2 := b.NewValue0(v.Line, OpAdd64, t)
		v3 := b.NewValue0(v.Line, OpHmul64, t)
		v4 := b.NewValue0(v.Line, OpConst64, t)
		v4.AuxInt = smagic64m(c)
		v3.AddArg(v4)
		v3.AddArg(x)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v5 := b.NewValue0(v.Line, OpConst64, t)
		v5.AuxInt = smagic64s(c)
		v1.AddArg(v5)
		v0.AddArg(v1)
		v6 := b.NewValue0(v.Line, OpRsh64x64, t)
		v6.AddArg(x)
		v7 := b.NewValue0(v.Line, OpConst64, t)
		v7.AuxInt = 63
		v6.AddArg(v7)
		v0.AddArg(v6)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpDiv64F simplifies float64 division by the
// constants +1 and -1. Division by +1 is the identity; division by
// -1 is negation. Returns true if a rewrite fired.
func rewriteValuegeneric_OpDiv64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64F x (Const64F [f2i(1)]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		if v_1.AuxInt != f2i(1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Div64F x (Const64F [f2i(-1)]))
	// cond:
	// result: (Neg64F x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		if v_1.AuxInt != f2i(-1) {
			break
		}
		// BUG FIX: the generated code emitted OpNeg32F here, negating a
		// float64 operand with the 32-bit float negation op (compare the
		// Div32F rule, which correctly uses Neg32F). A float64 divided by
		// -1 must become (Neg64F x).
		// NOTE(review): this file is autogenerated from gen/generic.rules;
		// the (Div64F x (Const64F [f2i(-1)])) rule there needs the same fix
		// or this change will be lost on regeneration.
		v.reset(OpNeg64F)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpDiv64u strength-reduces unsigned 64-bit
// division: power-of-two divisors become shifts, and other divisors
// with usable magic numbers become high-multiply sequences.
func rewriteValuegeneric_OpDiv64u(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Div64u <t> n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <t> [log2(c)]))
	for {
		t := v.Type
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 :=
b.NewValue0(v.Line, OpConst64, t) 2170 v0.AuxInt = log2(c) 2171 v.AddArg(v0) 2172 return true 2173 } 2174 // match: (Div64u <t> x (Const64 [c])) 2175 // cond: umagic64ok(c) && !umagic64a(c) 2176 // result: (Rsh64Ux64 (Hmul64u <t> (Const64 <t> [umagic64m(c)]) x) (Const64 <t> [umagic64s(c)])) 2177 for { 2178 t := v.Type 2179 x := v.Args[0] 2180 v_1 := v.Args[1] 2181 if v_1.Op != OpConst64 { 2182 break 2183 } 2184 c := v_1.AuxInt 2185 if !(umagic64ok(c) && !umagic64a(c)) { 2186 break 2187 } 2188 v.reset(OpRsh64Ux64) 2189 v0 := b.NewValue0(v.Line, OpHmul64u, t) 2190 v1 := b.NewValue0(v.Line, OpConst64, t) 2191 v1.AuxInt = umagic64m(c) 2192 v0.AddArg(v1) 2193 v0.AddArg(x) 2194 v.AddArg(v0) 2195 v2 := b.NewValue0(v.Line, OpConst64, t) 2196 v2.AuxInt = umagic64s(c) 2197 v.AddArg(v2) 2198 return true 2199 } 2200 // match: (Div64u <t> x (Const64 [c])) 2201 // cond: umagic64ok(c) && umagic64a(c) 2202 // result: (Rsh64Ux64 (Avg64u <t> (Hmul64u <t> x (Const64 <t> [umagic64m(c)])) x) (Const64 <t> [umagic64s(c)-1])) 2203 for { 2204 t := v.Type 2205 x := v.Args[0] 2206 v_1 := v.Args[1] 2207 if v_1.Op != OpConst64 { 2208 break 2209 } 2210 c := v_1.AuxInt 2211 if !(umagic64ok(c) && umagic64a(c)) { 2212 break 2213 } 2214 v.reset(OpRsh64Ux64) 2215 v0 := b.NewValue0(v.Line, OpAvg64u, t) 2216 v1 := b.NewValue0(v.Line, OpHmul64u, t) 2217 v1.AddArg(x) 2218 v2 := b.NewValue0(v.Line, OpConst64, t) 2219 v2.AuxInt = umagic64m(c) 2220 v1.AddArg(v2) 2221 v0.AddArg(v1) 2222 v0.AddArg(x) 2223 v.AddArg(v0) 2224 v3 := b.NewValue0(v.Line, OpConst64, t) 2225 v3.AuxInt = umagic64s(c) - 1 2226 v.AddArg(v3) 2227 return true 2228 } 2229 return false 2230 } 2231 func rewriteValuegeneric_OpEq16(v *Value, config *Config) bool { 2232 b := v.Block 2233 _ = b 2234 // match: (Eq16 x x) 2235 // cond: 2236 // result: (ConstBool [1]) 2237 for { 2238 x := v.Args[0] 2239 if x != v.Args[1] { 2240 break 2241 } 2242 v.reset(OpConstBool) 2243 v.AuxInt = 1 2244 return true 2245 } 2246 // match: (Eq16 (Const16 <t> [c]) 
(Add16 (Const16 <t> [d]) x)) 2247 // cond: 2248 // result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x) 2249 for { 2250 v_0 := v.Args[0] 2251 if v_0.Op != OpConst16 { 2252 break 2253 } 2254 t := v_0.Type 2255 c := v_0.AuxInt 2256 v_1 := v.Args[1] 2257 if v_1.Op != OpAdd16 { 2258 break 2259 } 2260 v_1_0 := v_1.Args[0] 2261 if v_1_0.Op != OpConst16 { 2262 break 2263 } 2264 if v_1_0.Type != t { 2265 break 2266 } 2267 d := v_1_0.AuxInt 2268 x := v_1.Args[1] 2269 v.reset(OpEq16) 2270 v0 := b.NewValue0(v.Line, OpConst16, t) 2271 v0.AuxInt = int64(int16(c - d)) 2272 v.AddArg(v0) 2273 v.AddArg(x) 2274 return true 2275 } 2276 // match: (Eq16 x (Const16 <t> [c])) 2277 // cond: x.Op != OpConst16 2278 // result: (Eq16 (Const16 <t> [c]) x) 2279 for { 2280 x := v.Args[0] 2281 v_1 := v.Args[1] 2282 if v_1.Op != OpConst16 { 2283 break 2284 } 2285 t := v_1.Type 2286 c := v_1.AuxInt 2287 if !(x.Op != OpConst16) { 2288 break 2289 } 2290 v.reset(OpEq16) 2291 v0 := b.NewValue0(v.Line, OpConst16, t) 2292 v0.AuxInt = c 2293 v.AddArg(v0) 2294 v.AddArg(x) 2295 return true 2296 } 2297 // match: (Eq16 (Const16 [c]) (Const16 [d])) 2298 // cond: 2299 // result: (ConstBool [b2i(c == d)]) 2300 for { 2301 v_0 := v.Args[0] 2302 if v_0.Op != OpConst16 { 2303 break 2304 } 2305 c := v_0.AuxInt 2306 v_1 := v.Args[1] 2307 if v_1.Op != OpConst16 { 2308 break 2309 } 2310 d := v_1.AuxInt 2311 v.reset(OpConstBool) 2312 v.AuxInt = b2i(c == d) 2313 return true 2314 } 2315 return false 2316 } 2317 func rewriteValuegeneric_OpEq32(v *Value, config *Config) bool { 2318 b := v.Block 2319 _ = b 2320 // match: (Eq32 x x) 2321 // cond: 2322 // result: (ConstBool [1]) 2323 for { 2324 x := v.Args[0] 2325 if x != v.Args[1] { 2326 break 2327 } 2328 v.reset(OpConstBool) 2329 v.AuxInt = 1 2330 return true 2331 } 2332 // match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 2333 // cond: 2334 // result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x) 2335 for { 2336 v_0 := v.Args[0] 2337 if v_0.Op != OpConst32 { 2338 break 
2339 } 2340 t := v_0.Type 2341 c := v_0.AuxInt 2342 v_1 := v.Args[1] 2343 if v_1.Op != OpAdd32 { 2344 break 2345 } 2346 v_1_0 := v_1.Args[0] 2347 if v_1_0.Op != OpConst32 { 2348 break 2349 } 2350 if v_1_0.Type != t { 2351 break 2352 } 2353 d := v_1_0.AuxInt 2354 x := v_1.Args[1] 2355 v.reset(OpEq32) 2356 v0 := b.NewValue0(v.Line, OpConst32, t) 2357 v0.AuxInt = int64(int32(c - d)) 2358 v.AddArg(v0) 2359 v.AddArg(x) 2360 return true 2361 } 2362 // match: (Eq32 x (Const32 <t> [c])) 2363 // cond: x.Op != OpConst32 2364 // result: (Eq32 (Const32 <t> [c]) x) 2365 for { 2366 x := v.Args[0] 2367 v_1 := v.Args[1] 2368 if v_1.Op != OpConst32 { 2369 break 2370 } 2371 t := v_1.Type 2372 c := v_1.AuxInt 2373 if !(x.Op != OpConst32) { 2374 break 2375 } 2376 v.reset(OpEq32) 2377 v0 := b.NewValue0(v.Line, OpConst32, t) 2378 v0.AuxInt = c 2379 v.AddArg(v0) 2380 v.AddArg(x) 2381 return true 2382 } 2383 // match: (Eq32 (Const32 [c]) (Const32 [d])) 2384 // cond: 2385 // result: (ConstBool [b2i(c == d)]) 2386 for { 2387 v_0 := v.Args[0] 2388 if v_0.Op != OpConst32 { 2389 break 2390 } 2391 c := v_0.AuxInt 2392 v_1 := v.Args[1] 2393 if v_1.Op != OpConst32 { 2394 break 2395 } 2396 d := v_1.AuxInt 2397 v.reset(OpConstBool) 2398 v.AuxInt = b2i(c == d) 2399 return true 2400 } 2401 return false 2402 } 2403 func rewriteValuegeneric_OpEq64(v *Value, config *Config) bool { 2404 b := v.Block 2405 _ = b 2406 // match: (Eq64 x x) 2407 // cond: 2408 // result: (ConstBool [1]) 2409 for { 2410 x := v.Args[0] 2411 if x != v.Args[1] { 2412 break 2413 } 2414 v.reset(OpConstBool) 2415 v.AuxInt = 1 2416 return true 2417 } 2418 // match: (Eq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 2419 // cond: 2420 // result: (Eq64 (Const64 <t> [c-d]) x) 2421 for { 2422 v_0 := v.Args[0] 2423 if v_0.Op != OpConst64 { 2424 break 2425 } 2426 t := v_0.Type 2427 c := v_0.AuxInt 2428 v_1 := v.Args[1] 2429 if v_1.Op != OpAdd64 { 2430 break 2431 } 2432 v_1_0 := v_1.Args[0] 2433 if v_1_0.Op != OpConst64 { 2434 break 2435 } 
2436 if v_1_0.Type != t { 2437 break 2438 } 2439 d := v_1_0.AuxInt 2440 x := v_1.Args[1] 2441 v.reset(OpEq64) 2442 v0 := b.NewValue0(v.Line, OpConst64, t) 2443 v0.AuxInt = c - d 2444 v.AddArg(v0) 2445 v.AddArg(x) 2446 return true 2447 } 2448 // match: (Eq64 x (Const64 <t> [c])) 2449 // cond: x.Op != OpConst64 2450 // result: (Eq64 (Const64 <t> [c]) x) 2451 for { 2452 x := v.Args[0] 2453 v_1 := v.Args[1] 2454 if v_1.Op != OpConst64 { 2455 break 2456 } 2457 t := v_1.Type 2458 c := v_1.AuxInt 2459 if !(x.Op != OpConst64) { 2460 break 2461 } 2462 v.reset(OpEq64) 2463 v0 := b.NewValue0(v.Line, OpConst64, t) 2464 v0.AuxInt = c 2465 v.AddArg(v0) 2466 v.AddArg(x) 2467 return true 2468 } 2469 // match: (Eq64 (Const64 [c]) (Const64 [d])) 2470 // cond: 2471 // result: (ConstBool [b2i(c == d)]) 2472 for { 2473 v_0 := v.Args[0] 2474 if v_0.Op != OpConst64 { 2475 break 2476 } 2477 c := v_0.AuxInt 2478 v_1 := v.Args[1] 2479 if v_1.Op != OpConst64 { 2480 break 2481 } 2482 d := v_1.AuxInt 2483 v.reset(OpConstBool) 2484 v.AuxInt = b2i(c == d) 2485 return true 2486 } 2487 return false 2488 } 2489 func rewriteValuegeneric_OpEq8(v *Value, config *Config) bool { 2490 b := v.Block 2491 _ = b 2492 // match: (Eq8 x x) 2493 // cond: 2494 // result: (ConstBool [1]) 2495 for { 2496 x := v.Args[0] 2497 if x != v.Args[1] { 2498 break 2499 } 2500 v.reset(OpConstBool) 2501 v.AuxInt = 1 2502 return true 2503 } 2504 // match: (Eq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 2505 // cond: 2506 // result: (Eq8 (Const8 <t> [int64(int8(c-d))]) x) 2507 for { 2508 v_0 := v.Args[0] 2509 if v_0.Op != OpConst8 { 2510 break 2511 } 2512 t := v_0.Type 2513 c := v_0.AuxInt 2514 v_1 := v.Args[1] 2515 if v_1.Op != OpAdd8 { 2516 break 2517 } 2518 v_1_0 := v_1.Args[0] 2519 if v_1_0.Op != OpConst8 { 2520 break 2521 } 2522 if v_1_0.Type != t { 2523 break 2524 } 2525 d := v_1_0.AuxInt 2526 x := v_1.Args[1] 2527 v.reset(OpEq8) 2528 v0 := b.NewValue0(v.Line, OpConst8, t) 2529 v0.AuxInt = int64(int8(c - d)) 2530 
v.AddArg(v0) 2531 v.AddArg(x) 2532 return true 2533 } 2534 // match: (Eq8 x (Const8 <t> [c])) 2535 // cond: x.Op != OpConst8 2536 // result: (Eq8 (Const8 <t> [c]) x) 2537 for { 2538 x := v.Args[0] 2539 v_1 := v.Args[1] 2540 if v_1.Op != OpConst8 { 2541 break 2542 } 2543 t := v_1.Type 2544 c := v_1.AuxInt 2545 if !(x.Op != OpConst8) { 2546 break 2547 } 2548 v.reset(OpEq8) 2549 v0 := b.NewValue0(v.Line, OpConst8, t) 2550 v0.AuxInt = c 2551 v.AddArg(v0) 2552 v.AddArg(x) 2553 return true 2554 } 2555 // match: (Eq8 (Const8 [c]) (Const8 [d])) 2556 // cond: 2557 // result: (ConstBool [b2i(c == d)]) 2558 for { 2559 v_0 := v.Args[0] 2560 if v_0.Op != OpConst8 { 2561 break 2562 } 2563 c := v_0.AuxInt 2564 v_1 := v.Args[1] 2565 if v_1.Op != OpConst8 { 2566 break 2567 } 2568 d := v_1.AuxInt 2569 v.reset(OpConstBool) 2570 v.AuxInt = b2i(c == d) 2571 return true 2572 } 2573 return false 2574 } 2575 func rewriteValuegeneric_OpEqB(v *Value, config *Config) bool { 2576 b := v.Block 2577 _ = b 2578 // match: (EqB (ConstBool [c]) (ConstBool [d])) 2579 // cond: 2580 // result: (ConstBool [b2i(c == d)]) 2581 for { 2582 v_0 := v.Args[0] 2583 if v_0.Op != OpConstBool { 2584 break 2585 } 2586 c := v_0.AuxInt 2587 v_1 := v.Args[1] 2588 if v_1.Op != OpConstBool { 2589 break 2590 } 2591 d := v_1.AuxInt 2592 v.reset(OpConstBool) 2593 v.AuxInt = b2i(c == d) 2594 return true 2595 } 2596 // match: (EqB (ConstBool [0]) x) 2597 // cond: 2598 // result: (Not x) 2599 for { 2600 v_0 := v.Args[0] 2601 if v_0.Op != OpConstBool { 2602 break 2603 } 2604 if v_0.AuxInt != 0 { 2605 break 2606 } 2607 x := v.Args[1] 2608 v.reset(OpNot) 2609 v.AddArg(x) 2610 return true 2611 } 2612 // match: (EqB (ConstBool [1]) x) 2613 // cond: 2614 // result: x 2615 for { 2616 v_0 := v.Args[0] 2617 if v_0.Op != OpConstBool { 2618 break 2619 } 2620 if v_0.AuxInt != 1 { 2621 break 2622 } 2623 x := v.Args[1] 2624 v.reset(OpCopy) 2625 v.Type = x.Type 2626 v.AddArg(x) 2627 return true 2628 } 2629 return false 2630 } 2631 func 
rewriteValuegeneric_OpEqInter(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// EqInter is lowered to a pointer comparison of the two
	// interfaces' ITab words. This rule always fires.
	// match: (EqInter x y)
	// cond:
	// result: (EqPtr (ITab x) (ITab y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpEqPtr rewrites a pointer comparison against
// the nil constant (on either side) into (Not (IsNonNil p)).
func rewriteValuegeneric_OpEqPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqPtr p (ConstNil))
	// cond:
	// result: (Not (IsNonNil p))
	for {
		p := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConstNil {
			break
		}
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	// match: (EqPtr (ConstNil) p)
	// cond:
	// result: (Not (IsNonNil p))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConstNil {
			break
		}
		p := v.Args[1]
		v.reset(OpNot)
		v0 := b.NewValue0(v.Line, OpIsNonNil, config.fe.TypeBool())
		v0.AddArg(p)
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpEqSlice lowers slice equality to a pointer
// comparison of the two slices' data pointers. This rule always
// fires.
func rewriteValuegeneric_OpEqSlice(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (EqSlice x y)
	// cond:
	// result: (EqPtr (SlicePtr x) (SlicePtr y))
	for {
		x := v.Args[0]
		y := v.Args[1]
		v.reset(OpEqPtr)
		v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v0.AddArg(x)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr())
		v1.AddArg(y)
		v.AddArg(v1)
		return true
	}
}

// rewriteValuegeneric_OpGeq16 folds a >= comparison of two 16-bit
// constants into a ConstBool.
func rewriteValuegeneric_OpGeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Geq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c >= d)])
	for {
v_0 := v.Args[0] 2712 if v_0.Op != OpConst16 { 2713 break 2714 } 2715 c := v_0.AuxInt 2716 v_1 := v.Args[1] 2717 if v_1.Op != OpConst16 { 2718 break 2719 } 2720 d := v_1.AuxInt 2721 v.reset(OpConstBool) 2722 v.AuxInt = b2i(c >= d) 2723 return true 2724 } 2725 return false 2726 } 2727 func rewriteValuegeneric_OpGeq16U(v *Value, config *Config) bool { 2728 b := v.Block 2729 _ = b 2730 // match: (Geq16U (Const16 [c]) (Const16 [d])) 2731 // cond: 2732 // result: (ConstBool [b2i(uint16(c) >= uint16(d))]) 2733 for { 2734 v_0 := v.Args[0] 2735 if v_0.Op != OpConst16 { 2736 break 2737 } 2738 c := v_0.AuxInt 2739 v_1 := v.Args[1] 2740 if v_1.Op != OpConst16 { 2741 break 2742 } 2743 d := v_1.AuxInt 2744 v.reset(OpConstBool) 2745 v.AuxInt = b2i(uint16(c) >= uint16(d)) 2746 return true 2747 } 2748 return false 2749 } 2750 func rewriteValuegeneric_OpGeq32(v *Value, config *Config) bool { 2751 b := v.Block 2752 _ = b 2753 // match: (Geq32 (Const32 [c]) (Const32 [d])) 2754 // cond: 2755 // result: (ConstBool [b2i(c >= d)]) 2756 for { 2757 v_0 := v.Args[0] 2758 if v_0.Op != OpConst32 { 2759 break 2760 } 2761 c := v_0.AuxInt 2762 v_1 := v.Args[1] 2763 if v_1.Op != OpConst32 { 2764 break 2765 } 2766 d := v_1.AuxInt 2767 v.reset(OpConstBool) 2768 v.AuxInt = b2i(c >= d) 2769 return true 2770 } 2771 return false 2772 } 2773 func rewriteValuegeneric_OpGeq32U(v *Value, config *Config) bool { 2774 b := v.Block 2775 _ = b 2776 // match: (Geq32U (Const32 [c]) (Const32 [d])) 2777 // cond: 2778 // result: (ConstBool [b2i(uint32(c) >= uint32(d))]) 2779 for { 2780 v_0 := v.Args[0] 2781 if v_0.Op != OpConst32 { 2782 break 2783 } 2784 c := v_0.AuxInt 2785 v_1 := v.Args[1] 2786 if v_1.Op != OpConst32 { 2787 break 2788 } 2789 d := v_1.AuxInt 2790 v.reset(OpConstBool) 2791 v.AuxInt = b2i(uint32(c) >= uint32(d)) 2792 return true 2793 } 2794 return false 2795 } 2796 func rewriteValuegeneric_OpGeq64(v *Value, config *Config) bool { 2797 b := v.Block 2798 _ = b 2799 // match: (Geq64 (Const64 [c]) 
(Const64 [d])) 2800 // cond: 2801 // result: (ConstBool [b2i(c >= d)]) 2802 for { 2803 v_0 := v.Args[0] 2804 if v_0.Op != OpConst64 { 2805 break 2806 } 2807 c := v_0.AuxInt 2808 v_1 := v.Args[1] 2809 if v_1.Op != OpConst64 { 2810 break 2811 } 2812 d := v_1.AuxInt 2813 v.reset(OpConstBool) 2814 v.AuxInt = b2i(c >= d) 2815 return true 2816 } 2817 return false 2818 } 2819 func rewriteValuegeneric_OpGeq64U(v *Value, config *Config) bool { 2820 b := v.Block 2821 _ = b 2822 // match: (Geq64U (Const64 [c]) (Const64 [d])) 2823 // cond: 2824 // result: (ConstBool [b2i(uint64(c) >= uint64(d))]) 2825 for { 2826 v_0 := v.Args[0] 2827 if v_0.Op != OpConst64 { 2828 break 2829 } 2830 c := v_0.AuxInt 2831 v_1 := v.Args[1] 2832 if v_1.Op != OpConst64 { 2833 break 2834 } 2835 d := v_1.AuxInt 2836 v.reset(OpConstBool) 2837 v.AuxInt = b2i(uint64(c) >= uint64(d)) 2838 return true 2839 } 2840 return false 2841 } 2842 func rewriteValuegeneric_OpGeq8(v *Value, config *Config) bool { 2843 b := v.Block 2844 _ = b 2845 // match: (Geq8 (Const8 [c]) (Const8 [d])) 2846 // cond: 2847 // result: (ConstBool [b2i(c >= d)]) 2848 for { 2849 v_0 := v.Args[0] 2850 if v_0.Op != OpConst8 { 2851 break 2852 } 2853 c := v_0.AuxInt 2854 v_1 := v.Args[1] 2855 if v_1.Op != OpConst8 { 2856 break 2857 } 2858 d := v_1.AuxInt 2859 v.reset(OpConstBool) 2860 v.AuxInt = b2i(c >= d) 2861 return true 2862 } 2863 return false 2864 } 2865 func rewriteValuegeneric_OpGeq8U(v *Value, config *Config) bool { 2866 b := v.Block 2867 _ = b 2868 // match: (Geq8U (Const8 [c]) (Const8 [d])) 2869 // cond: 2870 // result: (ConstBool [b2i(uint8(c) >= uint8(d))]) 2871 for { 2872 v_0 := v.Args[0] 2873 if v_0.Op != OpConst8 { 2874 break 2875 } 2876 c := v_0.AuxInt 2877 v_1 := v.Args[1] 2878 if v_1.Op != OpConst8 { 2879 break 2880 } 2881 d := v_1.AuxInt 2882 v.reset(OpConstBool) 2883 v.AuxInt = b2i(uint8(c) >= uint8(d)) 2884 return true 2885 } 2886 return false 2887 } 2888 func rewriteValuegeneric_OpGreater16(v *Value, config *Config) 
bool {
	b := v.Block
	_ = b // b is unused by the rules for this op
	// match: (Greater16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater16U constant-folds an unsigned 16-bit > of two constants.
func rewriteValuegeneric_OpGreater16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) > uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) > uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32 constant-folds a signed 32-bit > of two constants.
func rewriteValuegeneric_OpGreater32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater32U constant-folds an unsigned 32-bit > of two constants.
func rewriteValuegeneric_OpGreater32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) > uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) > uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64 constant-folds a signed 64-bit > of two constants.
func rewriteValuegeneric_OpGreater64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater64U constant-folds an unsigned 64-bit > of two constants.
func rewriteValuegeneric_OpGreater64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) > uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) > uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8 constant-folds a signed 8-bit > of two constants.
func rewriteValuegeneric_OpGreater8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c > d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c > d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpGreater8U constant-folds an unsigned 8-bit > of two constants.
func rewriteValuegeneric_OpGreater8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Greater8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) > uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d :=
v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) > uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpIsInBounds statically resolves index bounds checks
// (IsInBounds idx len) to a ConstBool where the rules below can prove the
// answer: zero-extended indices against a large-enough constant length,
// identical index and length, masked (And) indices, constant operands,
// and unsigned-modulus results.
func rewriteValuegeneric_OpIsInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused by the rules for this op
	// match: (IsInBounds (ZeroExt8to32 _) (Const32 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt8to64 _) (Const64 [c]))
	// cond: (1 << 8) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt8to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 8) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to32 _) (Const32 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to32 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (ZeroExt16to64 _) (Const64 [c]))
	// cond: (1 << 16) <= c
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpZeroExt16to64 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !((1 << 16) <= c) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds x x)
	// cond:
	// result: (ConstBool [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (IsInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c < d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c < d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c < d)
		return true
	}
	// match: (IsInBounds (Mod32u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod32u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsInBounds (Mod64u _ y) y)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpMod64u {
			break
		}
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}

// rewriteValuegeneric_OpIsSliceInBounds statically resolves slice bounds
// checks (IsSliceInBounds idx len); unlike IsInBounds, idx == len is in
// bounds here (<= rather than <).
func rewriteValuegeneric_OpIsSliceInBounds(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (IsSliceInBounds x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And32 (Const32 [c]) _) (Const32 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (And64 (Const64 [c]) _) (Const64 [d]))
	// cond: 0 <= c && c <= d
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		c := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(0 <= c && c <= d) {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [0]) _)
	// cond:
	//
result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const64 [0]) _)
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (IsSliceInBounds (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(0 <= c && c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(0 <= c && c <= d)
		return true
	}
	// match: (IsSliceInBounds (SliceLen x) (SliceCap x))
	// cond:
	// result: (ConstBool [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceLen {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSliceCap {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq16 constant-folds a signed 16-bit <= of two constants.
func rewriteValuegeneric_OpLeq16(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused by the rules for this op
	// match: (Leq16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq16U constant-folds an unsigned 16-bit <= of two constants.
func rewriteValuegeneric_OpLeq16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) <= uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) <= uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq32 constant-folds a signed 32-bit <= of two constants.
func rewriteValuegeneric_OpLeq32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq32U constant-folds an unsigned 32-bit <= of two constants.
func rewriteValuegeneric_OpLeq32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) <= uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) <= uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq64 constant-folds a signed 64-bit <= of two constants.
func rewriteValuegeneric_OpLeq64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq64 (Const64 [c]) (Const64 [d]))
	// cond:
	//
result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq64U constant-folds an unsigned 64-bit <= of two constants.
func rewriteValuegeneric_OpLeq64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused by the rules for this op
	// match: (Leq64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) <= uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) <= uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq8 constant-folds a signed 8-bit <= of two constants.
func rewriteValuegeneric_OpLeq8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c <= d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c <= d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLeq8U constant-folds an unsigned 8-bit <= of two constants.
func rewriteValuegeneric_OpLeq8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Leq8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) <= uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) <= uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess16 constant-folds a signed 16-bit < of two constants.
func rewriteValuegeneric_OpLess16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess16U constant-folds an unsigned 16-bit < of two constants.
func rewriteValuegeneric_OpLess16U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less16U (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (ConstBool [b2i(uint16(c) < uint16(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint16(c) < uint16(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess32 constant-folds a signed 32-bit < of two constants.
func rewriteValuegeneric_OpLess32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess32U constant-folds an unsigned 32-bit < of two constants.
func rewriteValuegeneric_OpLess32U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less32U (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(uint32(c) < uint32(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint32(c) < uint32(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess64 constant-folds a signed 64-bit < of two constants.
func
rewriteValuegeneric_OpLess64(v *Value, config *Config) bool {
	b := v.Block
	_ = b // b is unused by the rules for this op
	// match: (Less64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess64U constant-folds an unsigned 64-bit < of two constants.
func rewriteValuegeneric_OpLess64U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less64U (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (ConstBool [b2i(uint64(c) < uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint64(c) < uint64(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8 constant-folds a signed 8-bit < of two constants.
func rewriteValuegeneric_OpLess8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(c < d)])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c < d)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLess8U constant-folds an unsigned 8-bit < of two constants.
func rewriteValuegeneric_OpLess8U(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Less8U (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (ConstBool [b2i(uint8(c) < uint8(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(uint8(c) <
uint8(d))
		return true
	}
	return false
}

// rewriteValuegeneric_OpLoad forwards a load from a just-stored value and
// decomposes loads of small SSA-able structs into per-field loads wrapped
// in StructMakeN.
func rewriteValuegeneric_OpLoad(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Load <t1> p1 (Store [w] p2 x _))
	// cond: isSamePtr(p1,p2) && t1.Compare(x.Type)==CMPeq && w == t1.Size()
	// result: x
	for {
		t1 := v.Type
		p1 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStore {
			break
		}
		w := v_1.AuxInt
		p2 := v_1.Args[0]
		x := v_1.Args[1]
		if !(isSamePtr(p1, p2) && t1.Compare(x.Type) == CMPeq && w == t1.Size()) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Load <t> _ _)
	// cond: t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)
	// result: (StructMake0)
	for {
		t := v.Type
		if !(t.IsStruct() && t.NumFields() == 0 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)
	// result: (StructMake1 (Load <t.FieldType(0)> ptr mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 1 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake1)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)
	// result: (StructMake2 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 2 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake2)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)
	// result: (StructMake3 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 3 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake3)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)
	// result: (StructMake4 (Load <t.FieldType(0)> ptr mem) (Load <t.FieldType(1)> (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] ptr) mem) (Load <t.FieldType(2)> (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] ptr) mem) (Load <t.FieldType(3)> (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] ptr) mem))
	for {
		t := v.Type
		ptr := v.Args[0]
		mem := v.Args[1]
		if !(t.IsStruct() && t.NumFields() == 4 && config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpStructMake4)
		v0 := b.NewValue0(v.Line, OpLoad, t.FieldType(0))
		v0.AddArg(ptr)
		v0.AddArg(mem)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpLoad, t.FieldType(1))
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(ptr)
		v1.AddArg(v2)
		v1.AddArg(mem)
		v.AddArg(v1)
		v3 := b.NewValue0(v.Line, OpLoad, t.FieldType(2))
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v4.AuxInt = t.FieldOff(2)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v3.AddArg(mem)
		v.AddArg(v3)
		v5 := b.NewValue0(v.Line, OpLoad, t.FieldType(3))
		v6 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v6.AuxInt = t.FieldOff(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v5.AddArg(mem)
		v.AddArg(v5)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh16x16 canonicalizes a 16-bit shift amount to
// 64 bits and folds shifts of a zero constant.
func rewriteValuegeneric_OpLsh16x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x16 <t> x (Const16 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh16x32 canonicalizes a 32-bit shift amount to
// 64 bits and folds shifts of a zero constant.
func rewriteValuegeneric_OpLsh16x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x32 <t> x (Const32 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
}
		c := v_1.AuxInt
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x32 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh16x64 folds 16-bit left shifts with constant
// operands: constant << constant, shift by zero, zero shifted, oversized
// (>= 16) shift counts, and merging of stacked Lsh/Rsh-Lsh shift pairs.
func rewriteValuegeneric_OpLsh16x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x64 (Const16 [c]) (Const64 [d]))
	// cond:
	// result: (Const16 [int64(int16(c) << uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c) << uint64(d))
		return true
	}
	// match: (Lsh16x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Lsh16x64 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64 _ (Const64 [c]))
	// cond: uint64(c) >= 16
	// result: (Const16 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 16) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh16x64 <t> (Lsh16x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh16x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpLsh16x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(!uaddOvf(c, d)) {
			break
		}
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x64 (Rsh16Ux64 (Lsh16x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
	// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
	// result: (Lsh16x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpLsh16x64 {
			break
		}
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpConst64 {
			break
		}
		c1 := v_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c2 := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c3 := v_1.AuxInt
		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
			break
		}
		v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
		v0.AuxInt = c1 - c2 + c3
		v.AddArg(v0)
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh16x8 canonicalizes an 8-bit shift amount to
// 64 bits and folds shifts of a zero constant.
func rewriteValuegeneric_OpLsh16x8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh16x8 <t> x (Const8 [c]))
	// cond:
	// result: (Lsh16x64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
v.reset(OpLsh16x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh16x8 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh32x16 canonicalizes a 16-bit shift amount to
// 64 bits and folds shifts of a zero constant.
func rewriteValuegeneric_OpLsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x16 <t> x (Const16 [c]))
	// cond:
	// result: (Lsh32x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh32x16 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh32x32 canonicalizes a 32-bit shift amount to
// 64 bits and folds shifts of a zero constant.
func rewriteValuegeneric_OpLsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x32 <t> x (Const32 [c]))
	// cond:
	// result: (Lsh32x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpLsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint32(c))
		v.AddArg(v0)
		return true
	}
	// match: (Lsh32x32 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpLsh32x64 folds 32-bit left shifts with constant
// operands: constant << constant, shift by zero, zero shifted, oversized
// (>= 32) shift counts, and merging of stacked Lsh/Rsh-Lsh shift pairs.
func rewriteValuegeneric_OpLsh32x64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Lsh32x64 (Const32 [c]) (Const64 [d]))
	// cond:
	// result: (Const32 [int64(int32(c) << uint64(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c) << uint64(d))
		return true
	}
	// match: (Lsh32x64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Lsh32x64 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh32x64 _ (Const64 [c]))
	// cond: uint64(c) >= 32
	// result: (Const32 [0])
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(uint64(c) >= 32) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Lsh32x64 <t> (Lsh32x64 x (Const64 [c])) (Const64 [d]))
	// cond: !uaddOvf(c,d)
	// result: (Lsh32x64 x (Const64 <t> [c+d]))
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpLsh32x64 {
			break
		}
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if
!(!uaddOvf(c, d)) { 4374 break 4375 } 4376 v.reset(OpLsh32x64) 4377 v.AddArg(x) 4378 v0 := b.NewValue0(v.Line, OpConst64, t) 4379 v0.AuxInt = c + d 4380 v.AddArg(v0) 4381 return true 4382 } 4383 // match: (Lsh32x64 (Rsh32Ux64 (Lsh32x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4384 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4385 // result: (Lsh32x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4386 for { 4387 v_0 := v.Args[0] 4388 if v_0.Op != OpRsh32Ux64 { 4389 break 4390 } 4391 v_0_0 := v_0.Args[0] 4392 if v_0_0.Op != OpLsh32x64 { 4393 break 4394 } 4395 x := v_0_0.Args[0] 4396 v_0_0_1 := v_0_0.Args[1] 4397 if v_0_0_1.Op != OpConst64 { 4398 break 4399 } 4400 c1 := v_0_0_1.AuxInt 4401 v_0_1 := v_0.Args[1] 4402 if v_0_1.Op != OpConst64 { 4403 break 4404 } 4405 c2 := v_0_1.AuxInt 4406 v_1 := v.Args[1] 4407 if v_1.Op != OpConst64 { 4408 break 4409 } 4410 c3 := v_1.AuxInt 4411 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4412 break 4413 } 4414 v.reset(OpLsh32x64) 4415 v.AddArg(x) 4416 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4417 v0.AuxInt = c1 - c2 + c3 4418 v.AddArg(v0) 4419 return true 4420 } 4421 return false 4422 } 4423 func rewriteValuegeneric_OpLsh32x8(v *Value, config *Config) bool { 4424 b := v.Block 4425 _ = b 4426 // match: (Lsh32x8 <t> x (Const8 [c])) 4427 // cond: 4428 // result: (Lsh32x64 x (Const64 <t> [int64(uint8(c))])) 4429 for { 4430 t := v.Type 4431 x := v.Args[0] 4432 v_1 := v.Args[1] 4433 if v_1.Op != OpConst8 { 4434 break 4435 } 4436 c := v_1.AuxInt 4437 v.reset(OpLsh32x64) 4438 v.AddArg(x) 4439 v0 := b.NewValue0(v.Line, OpConst64, t) 4440 v0.AuxInt = int64(uint8(c)) 4441 v.AddArg(v0) 4442 return true 4443 } 4444 // match: (Lsh32x8 (Const32 [0]) _) 4445 // cond: 4446 // result: (Const32 [0]) 4447 for { 4448 v_0 := v.Args[0] 4449 if v_0.Op != OpConst32 { 4450 break 4451 } 4452 if v_0.AuxInt != 0 { 4453 break 4454 } 4455 
v.reset(OpConst32) 4456 v.AuxInt = 0 4457 return true 4458 } 4459 return false 4460 } 4461 func rewriteValuegeneric_OpLsh64x16(v *Value, config *Config) bool { 4462 b := v.Block 4463 _ = b 4464 // match: (Lsh64x16 <t> x (Const16 [c])) 4465 // cond: 4466 // result: (Lsh64x64 x (Const64 <t> [int64(uint16(c))])) 4467 for { 4468 t := v.Type 4469 x := v.Args[0] 4470 v_1 := v.Args[1] 4471 if v_1.Op != OpConst16 { 4472 break 4473 } 4474 c := v_1.AuxInt 4475 v.reset(OpLsh64x64) 4476 v.AddArg(x) 4477 v0 := b.NewValue0(v.Line, OpConst64, t) 4478 v0.AuxInt = int64(uint16(c)) 4479 v.AddArg(v0) 4480 return true 4481 } 4482 // match: (Lsh64x16 (Const64 [0]) _) 4483 // cond: 4484 // result: (Const64 [0]) 4485 for { 4486 v_0 := v.Args[0] 4487 if v_0.Op != OpConst64 { 4488 break 4489 } 4490 if v_0.AuxInt != 0 { 4491 break 4492 } 4493 v.reset(OpConst64) 4494 v.AuxInt = 0 4495 return true 4496 } 4497 return false 4498 } 4499 func rewriteValuegeneric_OpLsh64x32(v *Value, config *Config) bool { 4500 b := v.Block 4501 _ = b 4502 // match: (Lsh64x32 <t> x (Const32 [c])) 4503 // cond: 4504 // result: (Lsh64x64 x (Const64 <t> [int64(uint32(c))])) 4505 for { 4506 t := v.Type 4507 x := v.Args[0] 4508 v_1 := v.Args[1] 4509 if v_1.Op != OpConst32 { 4510 break 4511 } 4512 c := v_1.AuxInt 4513 v.reset(OpLsh64x64) 4514 v.AddArg(x) 4515 v0 := b.NewValue0(v.Line, OpConst64, t) 4516 v0.AuxInt = int64(uint32(c)) 4517 v.AddArg(v0) 4518 return true 4519 } 4520 // match: (Lsh64x32 (Const64 [0]) _) 4521 // cond: 4522 // result: (Const64 [0]) 4523 for { 4524 v_0 := v.Args[0] 4525 if v_0.Op != OpConst64 { 4526 break 4527 } 4528 if v_0.AuxInt != 0 { 4529 break 4530 } 4531 v.reset(OpConst64) 4532 v.AuxInt = 0 4533 return true 4534 } 4535 return false 4536 } 4537 func rewriteValuegeneric_OpLsh64x64(v *Value, config *Config) bool { 4538 b := v.Block 4539 _ = b 4540 // match: (Lsh64x64 (Const64 [c]) (Const64 [d])) 4541 // cond: 4542 // result: (Const64 [c << uint64(d)]) 4543 for { 4544 v_0 := v.Args[0] 4545 if 
v_0.Op != OpConst64 { 4546 break 4547 } 4548 c := v_0.AuxInt 4549 v_1 := v.Args[1] 4550 if v_1.Op != OpConst64 { 4551 break 4552 } 4553 d := v_1.AuxInt 4554 v.reset(OpConst64) 4555 v.AuxInt = c << uint64(d) 4556 return true 4557 } 4558 // match: (Lsh64x64 x (Const64 [0])) 4559 // cond: 4560 // result: x 4561 for { 4562 x := v.Args[0] 4563 v_1 := v.Args[1] 4564 if v_1.Op != OpConst64 { 4565 break 4566 } 4567 if v_1.AuxInt != 0 { 4568 break 4569 } 4570 v.reset(OpCopy) 4571 v.Type = x.Type 4572 v.AddArg(x) 4573 return true 4574 } 4575 // match: (Lsh64x64 (Const64 [0]) _) 4576 // cond: 4577 // result: (Const64 [0]) 4578 for { 4579 v_0 := v.Args[0] 4580 if v_0.Op != OpConst64 { 4581 break 4582 } 4583 if v_0.AuxInt != 0 { 4584 break 4585 } 4586 v.reset(OpConst64) 4587 v.AuxInt = 0 4588 return true 4589 } 4590 // match: (Lsh64x64 _ (Const64 [c])) 4591 // cond: uint64(c) >= 64 4592 // result: (Const64 [0]) 4593 for { 4594 v_1 := v.Args[1] 4595 if v_1.Op != OpConst64 { 4596 break 4597 } 4598 c := v_1.AuxInt 4599 if !(uint64(c) >= 64) { 4600 break 4601 } 4602 v.reset(OpConst64) 4603 v.AuxInt = 0 4604 return true 4605 } 4606 // match: (Lsh64x64 <t> (Lsh64x64 x (Const64 [c])) (Const64 [d])) 4607 // cond: !uaddOvf(c,d) 4608 // result: (Lsh64x64 x (Const64 <t> [c+d])) 4609 for { 4610 t := v.Type 4611 v_0 := v.Args[0] 4612 if v_0.Op != OpLsh64x64 { 4613 break 4614 } 4615 x := v_0.Args[0] 4616 v_0_1 := v_0.Args[1] 4617 if v_0_1.Op != OpConst64 { 4618 break 4619 } 4620 c := v_0_1.AuxInt 4621 v_1 := v.Args[1] 4622 if v_1.Op != OpConst64 { 4623 break 4624 } 4625 d := v_1.AuxInt 4626 if !(!uaddOvf(c, d)) { 4627 break 4628 } 4629 v.reset(OpLsh64x64) 4630 v.AddArg(x) 4631 v0 := b.NewValue0(v.Line, OpConst64, t) 4632 v0.AuxInt = c + d 4633 v.AddArg(v0) 4634 return true 4635 } 4636 // match: (Lsh64x64 (Rsh64Ux64 (Lsh64x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4637 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4638 // result: (Lsh64x64 x 
(Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4639 for { 4640 v_0 := v.Args[0] 4641 if v_0.Op != OpRsh64Ux64 { 4642 break 4643 } 4644 v_0_0 := v_0.Args[0] 4645 if v_0_0.Op != OpLsh64x64 { 4646 break 4647 } 4648 x := v_0_0.Args[0] 4649 v_0_0_1 := v_0_0.Args[1] 4650 if v_0_0_1.Op != OpConst64 { 4651 break 4652 } 4653 c1 := v_0_0_1.AuxInt 4654 v_0_1 := v_0.Args[1] 4655 if v_0_1.Op != OpConst64 { 4656 break 4657 } 4658 c2 := v_0_1.AuxInt 4659 v_1 := v.Args[1] 4660 if v_1.Op != OpConst64 { 4661 break 4662 } 4663 c3 := v_1.AuxInt 4664 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4665 break 4666 } 4667 v.reset(OpLsh64x64) 4668 v.AddArg(x) 4669 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4670 v0.AuxInt = c1 - c2 + c3 4671 v.AddArg(v0) 4672 return true 4673 } 4674 return false 4675 } 4676 func rewriteValuegeneric_OpLsh64x8(v *Value, config *Config) bool { 4677 b := v.Block 4678 _ = b 4679 // match: (Lsh64x8 <t> x (Const8 [c])) 4680 // cond: 4681 // result: (Lsh64x64 x (Const64 <t> [int64(uint8(c))])) 4682 for { 4683 t := v.Type 4684 x := v.Args[0] 4685 v_1 := v.Args[1] 4686 if v_1.Op != OpConst8 { 4687 break 4688 } 4689 c := v_1.AuxInt 4690 v.reset(OpLsh64x64) 4691 v.AddArg(x) 4692 v0 := b.NewValue0(v.Line, OpConst64, t) 4693 v0.AuxInt = int64(uint8(c)) 4694 v.AddArg(v0) 4695 return true 4696 } 4697 // match: (Lsh64x8 (Const64 [0]) _) 4698 // cond: 4699 // result: (Const64 [0]) 4700 for { 4701 v_0 := v.Args[0] 4702 if v_0.Op != OpConst64 { 4703 break 4704 } 4705 if v_0.AuxInt != 0 { 4706 break 4707 } 4708 v.reset(OpConst64) 4709 v.AuxInt = 0 4710 return true 4711 } 4712 return false 4713 } 4714 func rewriteValuegeneric_OpLsh8x16(v *Value, config *Config) bool { 4715 b := v.Block 4716 _ = b 4717 // match: (Lsh8x16 <t> x (Const16 [c])) 4718 // cond: 4719 // result: (Lsh8x64 x (Const64 <t> [int64(uint16(c))])) 4720 for { 4721 t := v.Type 4722 x := v.Args[0] 4723 v_1 := v.Args[1] 4724 if v_1.Op != OpConst16 { 4725 break 
4726 } 4727 c := v_1.AuxInt 4728 v.reset(OpLsh8x64) 4729 v.AddArg(x) 4730 v0 := b.NewValue0(v.Line, OpConst64, t) 4731 v0.AuxInt = int64(uint16(c)) 4732 v.AddArg(v0) 4733 return true 4734 } 4735 // match: (Lsh8x16 (Const8 [0]) _) 4736 // cond: 4737 // result: (Const8 [0]) 4738 for { 4739 v_0 := v.Args[0] 4740 if v_0.Op != OpConst8 { 4741 break 4742 } 4743 if v_0.AuxInt != 0 { 4744 break 4745 } 4746 v.reset(OpConst8) 4747 v.AuxInt = 0 4748 return true 4749 } 4750 return false 4751 } 4752 func rewriteValuegeneric_OpLsh8x32(v *Value, config *Config) bool { 4753 b := v.Block 4754 _ = b 4755 // match: (Lsh8x32 <t> x (Const32 [c])) 4756 // cond: 4757 // result: (Lsh8x64 x (Const64 <t> [int64(uint32(c))])) 4758 for { 4759 t := v.Type 4760 x := v.Args[0] 4761 v_1 := v.Args[1] 4762 if v_1.Op != OpConst32 { 4763 break 4764 } 4765 c := v_1.AuxInt 4766 v.reset(OpLsh8x64) 4767 v.AddArg(x) 4768 v0 := b.NewValue0(v.Line, OpConst64, t) 4769 v0.AuxInt = int64(uint32(c)) 4770 v.AddArg(v0) 4771 return true 4772 } 4773 // match: (Lsh8x32 (Const8 [0]) _) 4774 // cond: 4775 // result: (Const8 [0]) 4776 for { 4777 v_0 := v.Args[0] 4778 if v_0.Op != OpConst8 { 4779 break 4780 } 4781 if v_0.AuxInt != 0 { 4782 break 4783 } 4784 v.reset(OpConst8) 4785 v.AuxInt = 0 4786 return true 4787 } 4788 return false 4789 } 4790 func rewriteValuegeneric_OpLsh8x64(v *Value, config *Config) bool { 4791 b := v.Block 4792 _ = b 4793 // match: (Lsh8x64 (Const8 [c]) (Const64 [d])) 4794 // cond: 4795 // result: (Const8 [int64(int8(c) << uint64(d))]) 4796 for { 4797 v_0 := v.Args[0] 4798 if v_0.Op != OpConst8 { 4799 break 4800 } 4801 c := v_0.AuxInt 4802 v_1 := v.Args[1] 4803 if v_1.Op != OpConst64 { 4804 break 4805 } 4806 d := v_1.AuxInt 4807 v.reset(OpConst8) 4808 v.AuxInt = int64(int8(c) << uint64(d)) 4809 return true 4810 } 4811 // match: (Lsh8x64 x (Const64 [0])) 4812 // cond: 4813 // result: x 4814 for { 4815 x := v.Args[0] 4816 v_1 := v.Args[1] 4817 if v_1.Op != OpConst64 { 4818 break 4819 } 4820 if 
v_1.AuxInt != 0 { 4821 break 4822 } 4823 v.reset(OpCopy) 4824 v.Type = x.Type 4825 v.AddArg(x) 4826 return true 4827 } 4828 // match: (Lsh8x64 (Const8 [0]) _) 4829 // cond: 4830 // result: (Const8 [0]) 4831 for { 4832 v_0 := v.Args[0] 4833 if v_0.Op != OpConst8 { 4834 break 4835 } 4836 if v_0.AuxInt != 0 { 4837 break 4838 } 4839 v.reset(OpConst8) 4840 v.AuxInt = 0 4841 return true 4842 } 4843 // match: (Lsh8x64 _ (Const64 [c])) 4844 // cond: uint64(c) >= 8 4845 // result: (Const8 [0]) 4846 for { 4847 v_1 := v.Args[1] 4848 if v_1.Op != OpConst64 { 4849 break 4850 } 4851 c := v_1.AuxInt 4852 if !(uint64(c) >= 8) { 4853 break 4854 } 4855 v.reset(OpConst8) 4856 v.AuxInt = 0 4857 return true 4858 } 4859 // match: (Lsh8x64 <t> (Lsh8x64 x (Const64 [c])) (Const64 [d])) 4860 // cond: !uaddOvf(c,d) 4861 // result: (Lsh8x64 x (Const64 <t> [c+d])) 4862 for { 4863 t := v.Type 4864 v_0 := v.Args[0] 4865 if v_0.Op != OpLsh8x64 { 4866 break 4867 } 4868 x := v_0.Args[0] 4869 v_0_1 := v_0.Args[1] 4870 if v_0_1.Op != OpConst64 { 4871 break 4872 } 4873 c := v_0_1.AuxInt 4874 v_1 := v.Args[1] 4875 if v_1.Op != OpConst64 { 4876 break 4877 } 4878 d := v_1.AuxInt 4879 if !(!uaddOvf(c, d)) { 4880 break 4881 } 4882 v.reset(OpLsh8x64) 4883 v.AddArg(x) 4884 v0 := b.NewValue0(v.Line, OpConst64, t) 4885 v0.AuxInt = c + d 4886 v.AddArg(v0) 4887 return true 4888 } 4889 // match: (Lsh8x64 (Rsh8Ux64 (Lsh8x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 4890 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 4891 // result: (Lsh8x64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 4892 for { 4893 v_0 := v.Args[0] 4894 if v_0.Op != OpRsh8Ux64 { 4895 break 4896 } 4897 v_0_0 := v_0.Args[0] 4898 if v_0_0.Op != OpLsh8x64 { 4899 break 4900 } 4901 x := v_0_0.Args[0] 4902 v_0_0_1 := v_0_0.Args[1] 4903 if v_0_0_1.Op != OpConst64 { 4904 break 4905 } 4906 c1 := v_0_0_1.AuxInt 4907 v_0_1 := v_0.Args[1] 4908 if v_0_1.Op != OpConst64 { 4909 break 4910 } 4911 c2 := 
v_0_1.AuxInt 4912 v_1 := v.Args[1] 4913 if v_1.Op != OpConst64 { 4914 break 4915 } 4916 c3 := v_1.AuxInt 4917 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 4918 break 4919 } 4920 v.reset(OpLsh8x64) 4921 v.AddArg(x) 4922 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 4923 v0.AuxInt = c1 - c2 + c3 4924 v.AddArg(v0) 4925 return true 4926 } 4927 return false 4928 } 4929 func rewriteValuegeneric_OpLsh8x8(v *Value, config *Config) bool { 4930 b := v.Block 4931 _ = b 4932 // match: (Lsh8x8 <t> x (Const8 [c])) 4933 // cond: 4934 // result: (Lsh8x64 x (Const64 <t> [int64(uint8(c))])) 4935 for { 4936 t := v.Type 4937 x := v.Args[0] 4938 v_1 := v.Args[1] 4939 if v_1.Op != OpConst8 { 4940 break 4941 } 4942 c := v_1.AuxInt 4943 v.reset(OpLsh8x64) 4944 v.AddArg(x) 4945 v0 := b.NewValue0(v.Line, OpConst64, t) 4946 v0.AuxInt = int64(uint8(c)) 4947 v.AddArg(v0) 4948 return true 4949 } 4950 // match: (Lsh8x8 (Const8 [0]) _) 4951 // cond: 4952 // result: (Const8 [0]) 4953 for { 4954 v_0 := v.Args[0] 4955 if v_0.Op != OpConst8 { 4956 break 4957 } 4958 if v_0.AuxInt != 0 { 4959 break 4960 } 4961 v.reset(OpConst8) 4962 v.AuxInt = 0 4963 return true 4964 } 4965 return false 4966 } 4967 func rewriteValuegeneric_OpMod16(v *Value, config *Config) bool { 4968 b := v.Block 4969 _ = b 4970 // match: (Mod16 (Const16 [c]) (Const16 [d])) 4971 // cond: d != 0 4972 // result: (Const16 [int64(int16(c % d))]) 4973 for { 4974 v_0 := v.Args[0] 4975 if v_0.Op != OpConst16 { 4976 break 4977 } 4978 c := v_0.AuxInt 4979 v_1 := v.Args[1] 4980 if v_1.Op != OpConst16 { 4981 break 4982 } 4983 d := v_1.AuxInt 4984 if !(d != 0) { 4985 break 4986 } 4987 v.reset(OpConst16) 4988 v.AuxInt = int64(int16(c % d)) 4989 return true 4990 } 4991 return false 4992 } 4993 func rewriteValuegeneric_OpMod16u(v *Value, config *Config) bool { 4994 b := v.Block 4995 _ = b 4996 // match: (Mod16u (Const16 [c]) (Const16 [d])) 4997 // cond: d != 0 4998 // result: (Const16 
[int64(uint16(c) % uint16(d))]) 4999 for { 5000 v_0 := v.Args[0] 5001 if v_0.Op != OpConst16 { 5002 break 5003 } 5004 c := v_0.AuxInt 5005 v_1 := v.Args[1] 5006 if v_1.Op != OpConst16 { 5007 break 5008 } 5009 d := v_1.AuxInt 5010 if !(d != 0) { 5011 break 5012 } 5013 v.reset(OpConst16) 5014 v.AuxInt = int64(uint16(c) % uint16(d)) 5015 return true 5016 } 5017 return false 5018 } 5019 func rewriteValuegeneric_OpMod32(v *Value, config *Config) bool { 5020 b := v.Block 5021 _ = b 5022 // match: (Mod32 (Const32 [c]) (Const32 [d])) 5023 // cond: d != 0 5024 // result: (Const32 [int64(int32(c % d))]) 5025 for { 5026 v_0 := v.Args[0] 5027 if v_0.Op != OpConst32 { 5028 break 5029 } 5030 c := v_0.AuxInt 5031 v_1 := v.Args[1] 5032 if v_1.Op != OpConst32 { 5033 break 5034 } 5035 d := v_1.AuxInt 5036 if !(d != 0) { 5037 break 5038 } 5039 v.reset(OpConst32) 5040 v.AuxInt = int64(int32(c % d)) 5041 return true 5042 } 5043 return false 5044 } 5045 func rewriteValuegeneric_OpMod32u(v *Value, config *Config) bool { 5046 b := v.Block 5047 _ = b 5048 // match: (Mod32u (Const32 [c]) (Const32 [d])) 5049 // cond: d != 0 5050 // result: (Const32 [int64(uint32(c) % uint32(d))]) 5051 for { 5052 v_0 := v.Args[0] 5053 if v_0.Op != OpConst32 { 5054 break 5055 } 5056 c := v_0.AuxInt 5057 v_1 := v.Args[1] 5058 if v_1.Op != OpConst32 { 5059 break 5060 } 5061 d := v_1.AuxInt 5062 if !(d != 0) { 5063 break 5064 } 5065 v.reset(OpConst32) 5066 v.AuxInt = int64(uint32(c) % uint32(d)) 5067 return true 5068 } 5069 return false 5070 } 5071 func rewriteValuegeneric_OpMod64(v *Value, config *Config) bool { 5072 b := v.Block 5073 _ = b 5074 // match: (Mod64 (Const64 [c]) (Const64 [d])) 5075 // cond: d != 0 5076 // result: (Const64 [c % d]) 5077 for { 5078 v_0 := v.Args[0] 5079 if v_0.Op != OpConst64 { 5080 break 5081 } 5082 c := v_0.AuxInt 5083 v_1 := v.Args[1] 5084 if v_1.Op != OpConst64 { 5085 break 5086 } 5087 d := v_1.AuxInt 5088 if !(d != 0) { 5089 break 5090 } 5091 v.reset(OpConst64) 5092 v.AuxInt = c 
% d 5093 return true 5094 } 5095 // match: (Mod64 <t> x (Const64 [c])) 5096 // cond: x.Op != OpConst64 && smagic64ok(c) 5097 // result: (Sub64 x (Mul64 <t> (Div64 <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 5098 for { 5099 t := v.Type 5100 x := v.Args[0] 5101 v_1 := v.Args[1] 5102 if v_1.Op != OpConst64 { 5103 break 5104 } 5105 c := v_1.AuxInt 5106 if !(x.Op != OpConst64 && smagic64ok(c)) { 5107 break 5108 } 5109 v.reset(OpSub64) 5110 v.AddArg(x) 5111 v0 := b.NewValue0(v.Line, OpMul64, t) 5112 v1 := b.NewValue0(v.Line, OpDiv64, t) 5113 v1.AddArg(x) 5114 v2 := b.NewValue0(v.Line, OpConst64, t) 5115 v2.AuxInt = c 5116 v1.AddArg(v2) 5117 v0.AddArg(v1) 5118 v3 := b.NewValue0(v.Line, OpConst64, t) 5119 v3.AuxInt = c 5120 v0.AddArg(v3) 5121 v.AddArg(v0) 5122 return true 5123 } 5124 return false 5125 } 5126 func rewriteValuegeneric_OpMod64u(v *Value, config *Config) bool { 5127 b := v.Block 5128 _ = b 5129 // match: (Mod64u (Const64 [c]) (Const64 [d])) 5130 // cond: d != 0 5131 // result: (Const64 [int64(uint64(c) % uint64(d))]) 5132 for { 5133 v_0 := v.Args[0] 5134 if v_0.Op != OpConst64 { 5135 break 5136 } 5137 c := v_0.AuxInt 5138 v_1 := v.Args[1] 5139 if v_1.Op != OpConst64 { 5140 break 5141 } 5142 d := v_1.AuxInt 5143 if !(d != 0) { 5144 break 5145 } 5146 v.reset(OpConst64) 5147 v.AuxInt = int64(uint64(c) % uint64(d)) 5148 return true 5149 } 5150 // match: (Mod64u <t> n (Const64 [c])) 5151 // cond: isPowerOfTwo(c) 5152 // result: (And64 n (Const64 <t> [c-1])) 5153 for { 5154 t := v.Type 5155 n := v.Args[0] 5156 v_1 := v.Args[1] 5157 if v_1.Op != OpConst64 { 5158 break 5159 } 5160 c := v_1.AuxInt 5161 if !(isPowerOfTwo(c)) { 5162 break 5163 } 5164 v.reset(OpAnd64) 5165 v.AddArg(n) 5166 v0 := b.NewValue0(v.Line, OpConst64, t) 5167 v0.AuxInt = c - 1 5168 v.AddArg(v0) 5169 return true 5170 } 5171 // match: (Mod64u <t> x (Const64 [c])) 5172 // cond: x.Op != OpConst64 && umagic64ok(c) 5173 // result: (Sub64 x (Mul64 <t> (Div64u <t> x (Const64 <t> [c])) (Const64 <t> [c]))) 
5174 for { 5175 t := v.Type 5176 x := v.Args[0] 5177 v_1 := v.Args[1] 5178 if v_1.Op != OpConst64 { 5179 break 5180 } 5181 c := v_1.AuxInt 5182 if !(x.Op != OpConst64 && umagic64ok(c)) { 5183 break 5184 } 5185 v.reset(OpSub64) 5186 v.AddArg(x) 5187 v0 := b.NewValue0(v.Line, OpMul64, t) 5188 v1 := b.NewValue0(v.Line, OpDiv64u, t) 5189 v1.AddArg(x) 5190 v2 := b.NewValue0(v.Line, OpConst64, t) 5191 v2.AuxInt = c 5192 v1.AddArg(v2) 5193 v0.AddArg(v1) 5194 v3 := b.NewValue0(v.Line, OpConst64, t) 5195 v3.AuxInt = c 5196 v0.AddArg(v3) 5197 v.AddArg(v0) 5198 return true 5199 } 5200 return false 5201 } 5202 func rewriteValuegeneric_OpMod8(v *Value, config *Config) bool { 5203 b := v.Block 5204 _ = b 5205 // match: (Mod8 (Const8 [c]) (Const8 [d])) 5206 // cond: d != 0 5207 // result: (Const8 [int64(int8(c % d))]) 5208 for { 5209 v_0 := v.Args[0] 5210 if v_0.Op != OpConst8 { 5211 break 5212 } 5213 c := v_0.AuxInt 5214 v_1 := v.Args[1] 5215 if v_1.Op != OpConst8 { 5216 break 5217 } 5218 d := v_1.AuxInt 5219 if !(d != 0) { 5220 break 5221 } 5222 v.reset(OpConst8) 5223 v.AuxInt = int64(int8(c % d)) 5224 return true 5225 } 5226 return false 5227 } 5228 func rewriteValuegeneric_OpMod8u(v *Value, config *Config) bool { 5229 b := v.Block 5230 _ = b 5231 // match: (Mod8u (Const8 [c]) (Const8 [d])) 5232 // cond: d != 0 5233 // result: (Const8 [int64(uint8(c) % uint8(d))]) 5234 for { 5235 v_0 := v.Args[0] 5236 if v_0.Op != OpConst8 { 5237 break 5238 } 5239 c := v_0.AuxInt 5240 v_1 := v.Args[1] 5241 if v_1.Op != OpConst8 { 5242 break 5243 } 5244 d := v_1.AuxInt 5245 if !(d != 0) { 5246 break 5247 } 5248 v.reset(OpConst8) 5249 v.AuxInt = int64(uint8(c) % uint8(d)) 5250 return true 5251 } 5252 return false 5253 } 5254 func rewriteValuegeneric_OpMul16(v *Value, config *Config) bool { 5255 b := v.Block 5256 _ = b 5257 // match: (Mul16 (Const16 [c]) (Const16 [d])) 5258 // cond: 5259 // result: (Const16 [int64(int16(c*d))]) 5260 for { 5261 v_0 := v.Args[0] 5262 if v_0.Op != OpConst16 { 5263 
break 5264 } 5265 c := v_0.AuxInt 5266 v_1 := v.Args[1] 5267 if v_1.Op != OpConst16 { 5268 break 5269 } 5270 d := v_1.AuxInt 5271 v.reset(OpConst16) 5272 v.AuxInt = int64(int16(c * d)) 5273 return true 5274 } 5275 // match: (Mul16 (Const16 [-1]) x) 5276 // cond: 5277 // result: (Neg16 x) 5278 for { 5279 v_0 := v.Args[0] 5280 if v_0.Op != OpConst16 { 5281 break 5282 } 5283 if v_0.AuxInt != -1 { 5284 break 5285 } 5286 x := v.Args[1] 5287 v.reset(OpNeg16) 5288 v.AddArg(x) 5289 return true 5290 } 5291 // match: (Mul16 x (Const16 <t> [c])) 5292 // cond: x.Op != OpConst16 5293 // result: (Mul16 (Const16 <t> [c]) x) 5294 for { 5295 x := v.Args[0] 5296 v_1 := v.Args[1] 5297 if v_1.Op != OpConst16 { 5298 break 5299 } 5300 t := v_1.Type 5301 c := v_1.AuxInt 5302 if !(x.Op != OpConst16) { 5303 break 5304 } 5305 v.reset(OpMul16) 5306 v0 := b.NewValue0(v.Line, OpConst16, t) 5307 v0.AuxInt = c 5308 v.AddArg(v0) 5309 v.AddArg(x) 5310 return true 5311 } 5312 // match: (Mul16 (Const16 [0]) _) 5313 // cond: 5314 // result: (Const16 [0]) 5315 for { 5316 v_0 := v.Args[0] 5317 if v_0.Op != OpConst16 { 5318 break 5319 } 5320 if v_0.AuxInt != 0 { 5321 break 5322 } 5323 v.reset(OpConst16) 5324 v.AuxInt = 0 5325 return true 5326 } 5327 return false 5328 } 5329 func rewriteValuegeneric_OpMul32(v *Value, config *Config) bool { 5330 b := v.Block 5331 _ = b 5332 // match: (Mul32 (Const32 [c]) (Const32 [d])) 5333 // cond: 5334 // result: (Const32 [int64(int32(c*d))]) 5335 for { 5336 v_0 := v.Args[0] 5337 if v_0.Op != OpConst32 { 5338 break 5339 } 5340 c := v_0.AuxInt 5341 v_1 := v.Args[1] 5342 if v_1.Op != OpConst32 { 5343 break 5344 } 5345 d := v_1.AuxInt 5346 v.reset(OpConst32) 5347 v.AuxInt = int64(int32(c * d)) 5348 return true 5349 } 5350 // match: (Mul32 (Const32 [-1]) x) 5351 // cond: 5352 // result: (Neg32 x) 5353 for { 5354 v_0 := v.Args[0] 5355 if v_0.Op != OpConst32 { 5356 break 5357 } 5358 if v_0.AuxInt != -1 { 5359 break 5360 } 5361 x := v.Args[1] 5362 v.reset(OpNeg32) 5363 
v.AddArg(x) 5364 return true 5365 } 5366 // match: (Mul32 x (Const32 <t> [c])) 5367 // cond: x.Op != OpConst32 5368 // result: (Mul32 (Const32 <t> [c]) x) 5369 for { 5370 x := v.Args[0] 5371 v_1 := v.Args[1] 5372 if v_1.Op != OpConst32 { 5373 break 5374 } 5375 t := v_1.Type 5376 c := v_1.AuxInt 5377 if !(x.Op != OpConst32) { 5378 break 5379 } 5380 v.reset(OpMul32) 5381 v0 := b.NewValue0(v.Line, OpConst32, t) 5382 v0.AuxInt = c 5383 v.AddArg(v0) 5384 v.AddArg(x) 5385 return true 5386 } 5387 // match: (Mul32 (Const32 <t> [c]) (Add32 <t> (Const32 <t> [d]) x)) 5388 // cond: 5389 // result: (Add32 (Const32 <t> [int64(int32(c*d))]) (Mul32 <t> (Const32 <t> [c]) x)) 5390 for { 5391 v_0 := v.Args[0] 5392 if v_0.Op != OpConst32 { 5393 break 5394 } 5395 t := v_0.Type 5396 c := v_0.AuxInt 5397 v_1 := v.Args[1] 5398 if v_1.Op != OpAdd32 { 5399 break 5400 } 5401 if v_1.Type != t { 5402 break 5403 } 5404 v_1_0 := v_1.Args[0] 5405 if v_1_0.Op != OpConst32 { 5406 break 5407 } 5408 if v_1_0.Type != t { 5409 break 5410 } 5411 d := v_1_0.AuxInt 5412 x := v_1.Args[1] 5413 v.reset(OpAdd32) 5414 v0 := b.NewValue0(v.Line, OpConst32, t) 5415 v0.AuxInt = int64(int32(c * d)) 5416 v.AddArg(v0) 5417 v1 := b.NewValue0(v.Line, OpMul32, t) 5418 v2 := b.NewValue0(v.Line, OpConst32, t) 5419 v2.AuxInt = c 5420 v1.AddArg(v2) 5421 v1.AddArg(x) 5422 v.AddArg(v1) 5423 return true 5424 } 5425 // match: (Mul32 (Const32 [0]) _) 5426 // cond: 5427 // result: (Const32 [0]) 5428 for { 5429 v_0 := v.Args[0] 5430 if v_0.Op != OpConst32 { 5431 break 5432 } 5433 if v_0.AuxInt != 0 { 5434 break 5435 } 5436 v.reset(OpConst32) 5437 v.AuxInt = 0 5438 return true 5439 } 5440 return false 5441 } 5442 func rewriteValuegeneric_OpMul32F(v *Value, config *Config) bool { 5443 b := v.Block 5444 _ = b 5445 // match: (Mul32F (Const32F [c]) (Const32F [d])) 5446 // cond: 5447 // result: (Const32F [f2i(float64(i2f32(c) * i2f32(d)))]) 5448 for { 5449 v_0 := v.Args[0] 5450 if v_0.Op != OpConst32F { 5451 break 5452 } 5453 c := 
v_0.AuxInt 5454 v_1 := v.Args[1] 5455 if v_1.Op != OpConst32F { 5456 break 5457 } 5458 d := v_1.AuxInt 5459 v.reset(OpConst32F) 5460 v.AuxInt = f2i(float64(i2f32(c) * i2f32(d))) 5461 return true 5462 } 5463 // match: (Mul32F x (Const32F [f2i(1)])) 5464 // cond: 5465 // result: x 5466 for { 5467 x := v.Args[0] 5468 v_1 := v.Args[1] 5469 if v_1.Op != OpConst32F { 5470 break 5471 } 5472 if v_1.AuxInt != f2i(1) { 5473 break 5474 } 5475 v.reset(OpCopy) 5476 v.Type = x.Type 5477 v.AddArg(x) 5478 return true 5479 } 5480 // match: (Mul32F (Const32F [f2i(1)]) x) 5481 // cond: 5482 // result: x 5483 for { 5484 v_0 := v.Args[0] 5485 if v_0.Op != OpConst32F { 5486 break 5487 } 5488 if v_0.AuxInt != f2i(1) { 5489 break 5490 } 5491 x := v.Args[1] 5492 v.reset(OpCopy) 5493 v.Type = x.Type 5494 v.AddArg(x) 5495 return true 5496 } 5497 // match: (Mul32F x (Const32F [f2i(-1)])) 5498 // cond: 5499 // result: (Neg32F x) 5500 for { 5501 x := v.Args[0] 5502 v_1 := v.Args[1] 5503 if v_1.Op != OpConst32F { 5504 break 5505 } 5506 if v_1.AuxInt != f2i(-1) { 5507 break 5508 } 5509 v.reset(OpNeg32F) 5510 v.AddArg(x) 5511 return true 5512 } 5513 // match: (Mul32F (Const32F [f2i(-1)]) x) 5514 // cond: 5515 // result: (Neg32F x) 5516 for { 5517 v_0 := v.Args[0] 5518 if v_0.Op != OpConst32F { 5519 break 5520 } 5521 if v_0.AuxInt != f2i(-1) { 5522 break 5523 } 5524 x := v.Args[1] 5525 v.reset(OpNeg32F) 5526 v.AddArg(x) 5527 return true 5528 } 5529 return false 5530 } 5531 func rewriteValuegeneric_OpMul64(v *Value, config *Config) bool { 5532 b := v.Block 5533 _ = b 5534 // match: (Mul64 (Const64 [c]) (Const64 [d])) 5535 // cond: 5536 // result: (Const64 [c*d]) 5537 for { 5538 v_0 := v.Args[0] 5539 if v_0.Op != OpConst64 { 5540 break 5541 } 5542 c := v_0.AuxInt 5543 v_1 := v.Args[1] 5544 if v_1.Op != OpConst64 { 5545 break 5546 } 5547 d := v_1.AuxInt 5548 v.reset(OpConst64) 5549 v.AuxInt = c * d 5550 return true 5551 } 5552 // match: (Mul64 (Const64 [-1]) x) 5553 // cond: 5554 // result: (Neg64 x) 
5555 for { 5556 v_0 := v.Args[0] 5557 if v_0.Op != OpConst64 { 5558 break 5559 } 5560 if v_0.AuxInt != -1 { 5561 break 5562 } 5563 x := v.Args[1] 5564 v.reset(OpNeg64) 5565 v.AddArg(x) 5566 return true 5567 } 5568 // match: (Mul64 x (Const64 <t> [c])) 5569 // cond: x.Op != OpConst64 5570 // result: (Mul64 (Const64 <t> [c]) x) 5571 for { 5572 x := v.Args[0] 5573 v_1 := v.Args[1] 5574 if v_1.Op != OpConst64 { 5575 break 5576 } 5577 t := v_1.Type 5578 c := v_1.AuxInt 5579 if !(x.Op != OpConst64) { 5580 break 5581 } 5582 v.reset(OpMul64) 5583 v0 := b.NewValue0(v.Line, OpConst64, t) 5584 v0.AuxInt = c 5585 v.AddArg(v0) 5586 v.AddArg(x) 5587 return true 5588 } 5589 // match: (Mul64 (Const64 <t> [c]) (Add64 <t> (Const64 <t> [d]) x)) 5590 // cond: 5591 // result: (Add64 (Const64 <t> [c*d]) (Mul64 <t> (Const64 <t> [c]) x)) 5592 for { 5593 v_0 := v.Args[0] 5594 if v_0.Op != OpConst64 { 5595 break 5596 } 5597 t := v_0.Type 5598 c := v_0.AuxInt 5599 v_1 := v.Args[1] 5600 if v_1.Op != OpAdd64 { 5601 break 5602 } 5603 if v_1.Type != t { 5604 break 5605 } 5606 v_1_0 := v_1.Args[0] 5607 if v_1_0.Op != OpConst64 { 5608 break 5609 } 5610 if v_1_0.Type != t { 5611 break 5612 } 5613 d := v_1_0.AuxInt 5614 x := v_1.Args[1] 5615 v.reset(OpAdd64) 5616 v0 := b.NewValue0(v.Line, OpConst64, t) 5617 v0.AuxInt = c * d 5618 v.AddArg(v0) 5619 v1 := b.NewValue0(v.Line, OpMul64, t) 5620 v2 := b.NewValue0(v.Line, OpConst64, t) 5621 v2.AuxInt = c 5622 v1.AddArg(v2) 5623 v1.AddArg(x) 5624 v.AddArg(v1) 5625 return true 5626 } 5627 // match: (Mul64 (Const64 [0]) _) 5628 // cond: 5629 // result: (Const64 [0]) 5630 for { 5631 v_0 := v.Args[0] 5632 if v_0.Op != OpConst64 { 5633 break 5634 } 5635 if v_0.AuxInt != 0 { 5636 break 5637 } 5638 v.reset(OpConst64) 5639 v.AuxInt = 0 5640 return true 5641 } 5642 return false 5643 } 5644 func rewriteValuegeneric_OpMul64F(v *Value, config *Config) bool { 5645 b := v.Block 5646 _ = b 5647 // match: (Mul64F (Const64F [c]) (Const64F [d])) 5648 // cond: 5649 // result: 
(Const64F [f2i(i2f(c) * i2f(d))]) 5650 for { 5651 v_0 := v.Args[0] 5652 if v_0.Op != OpConst64F { 5653 break 5654 } 5655 c := v_0.AuxInt 5656 v_1 := v.Args[1] 5657 if v_1.Op != OpConst64F { 5658 break 5659 } 5660 d := v_1.AuxInt 5661 v.reset(OpConst64F) 5662 v.AuxInt = f2i(i2f(c) * i2f(d)) 5663 return true 5664 } 5665 // match: (Mul64F x (Const64F [f2i(1)])) 5666 // cond: 5667 // result: x 5668 for { 5669 x := v.Args[0] 5670 v_1 := v.Args[1] 5671 if v_1.Op != OpConst64F { 5672 break 5673 } 5674 if v_1.AuxInt != f2i(1) { 5675 break 5676 } 5677 v.reset(OpCopy) 5678 v.Type = x.Type 5679 v.AddArg(x) 5680 return true 5681 } 5682 // match: (Mul64F (Const64F [f2i(1)]) x) 5683 // cond: 5684 // result: x 5685 for { 5686 v_0 := v.Args[0] 5687 if v_0.Op != OpConst64F { 5688 break 5689 } 5690 if v_0.AuxInt != f2i(1) { 5691 break 5692 } 5693 x := v.Args[1] 5694 v.reset(OpCopy) 5695 v.Type = x.Type 5696 v.AddArg(x) 5697 return true 5698 } 5699 // match: (Mul64F x (Const64F [f2i(-1)])) 5700 // cond: 5701 // result: (Neg64F x) 5702 for { 5703 x := v.Args[0] 5704 v_1 := v.Args[1] 5705 if v_1.Op != OpConst64F { 5706 break 5707 } 5708 if v_1.AuxInt != f2i(-1) { 5709 break 5710 } 5711 v.reset(OpNeg64F) 5712 v.AddArg(x) 5713 return true 5714 } 5715 // match: (Mul64F (Const64F [f2i(-1)]) x) 5716 // cond: 5717 // result: (Neg64F x) 5718 for { 5719 v_0 := v.Args[0] 5720 if v_0.Op != OpConst64F { 5721 break 5722 } 5723 if v_0.AuxInt != f2i(-1) { 5724 break 5725 } 5726 x := v.Args[1] 5727 v.reset(OpNeg64F) 5728 v.AddArg(x) 5729 return true 5730 } 5731 return false 5732 } 5733 func rewriteValuegeneric_OpMul8(v *Value, config *Config) bool { 5734 b := v.Block 5735 _ = b 5736 // match: (Mul8 (Const8 [c]) (Const8 [d])) 5737 // cond: 5738 // result: (Const8 [int64(int8(c*d))]) 5739 for { 5740 v_0 := v.Args[0] 5741 if v_0.Op != OpConst8 { 5742 break 5743 } 5744 c := v_0.AuxInt 5745 v_1 := v.Args[1] 5746 if v_1.Op != OpConst8 { 5747 break 5748 } 5749 d := v_1.AuxInt 5750 v.reset(OpConst8) 5751 
v.AuxInt = int64(int8(c * d)) 5752 return true 5753 } 5754 // match: (Mul8 (Const8 [-1]) x) 5755 // cond: 5756 // result: (Neg8 x) 5757 for { 5758 v_0 := v.Args[0] 5759 if v_0.Op != OpConst8 { 5760 break 5761 } 5762 if v_0.AuxInt != -1 { 5763 break 5764 } 5765 x := v.Args[1] 5766 v.reset(OpNeg8) 5767 v.AddArg(x) 5768 return true 5769 } 5770 // match: (Mul8 x (Const8 <t> [c])) 5771 // cond: x.Op != OpConst8 5772 // result: (Mul8 (Const8 <t> [c]) x) 5773 for { 5774 x := v.Args[0] 5775 v_1 := v.Args[1] 5776 if v_1.Op != OpConst8 { 5777 break 5778 } 5779 t := v_1.Type 5780 c := v_1.AuxInt 5781 if !(x.Op != OpConst8) { 5782 break 5783 } 5784 v.reset(OpMul8) 5785 v0 := b.NewValue0(v.Line, OpConst8, t) 5786 v0.AuxInt = c 5787 v.AddArg(v0) 5788 v.AddArg(x) 5789 return true 5790 } 5791 // match: (Mul8 (Const8 [0]) _) 5792 // cond: 5793 // result: (Const8 [0]) 5794 for { 5795 v_0 := v.Args[0] 5796 if v_0.Op != OpConst8 { 5797 break 5798 } 5799 if v_0.AuxInt != 0 { 5800 break 5801 } 5802 v.reset(OpConst8) 5803 v.AuxInt = 0 5804 return true 5805 } 5806 return false 5807 } 5808 func rewriteValuegeneric_OpNeg16(v *Value, config *Config) bool { 5809 b := v.Block 5810 _ = b 5811 // match: (Neg16 (Sub16 x y)) 5812 // cond: 5813 // result: (Sub16 y x) 5814 for { 5815 v_0 := v.Args[0] 5816 if v_0.Op != OpSub16 { 5817 break 5818 } 5819 x := v_0.Args[0] 5820 y := v_0.Args[1] 5821 v.reset(OpSub16) 5822 v.AddArg(y) 5823 v.AddArg(x) 5824 return true 5825 } 5826 return false 5827 } 5828 func rewriteValuegeneric_OpNeg32(v *Value, config *Config) bool { 5829 b := v.Block 5830 _ = b 5831 // match: (Neg32 (Sub32 x y)) 5832 // cond: 5833 // result: (Sub32 y x) 5834 for { 5835 v_0 := v.Args[0] 5836 if v_0.Op != OpSub32 { 5837 break 5838 } 5839 x := v_0.Args[0] 5840 y := v_0.Args[1] 5841 v.reset(OpSub32) 5842 v.AddArg(y) 5843 v.AddArg(x) 5844 return true 5845 } 5846 return false 5847 } 5848 func rewriteValuegeneric_OpNeg64(v *Value, config *Config) bool { 5849 b := v.Block 5850 _ = b 5851 // 
match: (Neg64 (Sub64 x y)) 5852 // cond: 5853 // result: (Sub64 y x) 5854 for { 5855 v_0 := v.Args[0] 5856 if v_0.Op != OpSub64 { 5857 break 5858 } 5859 x := v_0.Args[0] 5860 y := v_0.Args[1] 5861 v.reset(OpSub64) 5862 v.AddArg(y) 5863 v.AddArg(x) 5864 return true 5865 } 5866 return false 5867 } 5868 func rewriteValuegeneric_OpNeg8(v *Value, config *Config) bool { 5869 b := v.Block 5870 _ = b 5871 // match: (Neg8 (Sub8 x y)) 5872 // cond: 5873 // result: (Sub8 y x) 5874 for { 5875 v_0 := v.Args[0] 5876 if v_0.Op != OpSub8 { 5877 break 5878 } 5879 x := v_0.Args[0] 5880 y := v_0.Args[1] 5881 v.reset(OpSub8) 5882 v.AddArg(y) 5883 v.AddArg(x) 5884 return true 5885 } 5886 return false 5887 } 5888 func rewriteValuegeneric_OpNeq16(v *Value, config *Config) bool { 5889 b := v.Block 5890 _ = b 5891 // match: (Neq16 x x) 5892 // cond: 5893 // result: (ConstBool [0]) 5894 for { 5895 x := v.Args[0] 5896 if x != v.Args[1] { 5897 break 5898 } 5899 v.reset(OpConstBool) 5900 v.AuxInt = 0 5901 return true 5902 } 5903 // match: (Neq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x)) 5904 // cond: 5905 // result: (Neq16 (Const16 <t> [int64(int16(c-d))]) x) 5906 for { 5907 v_0 := v.Args[0] 5908 if v_0.Op != OpConst16 { 5909 break 5910 } 5911 t := v_0.Type 5912 c := v_0.AuxInt 5913 v_1 := v.Args[1] 5914 if v_1.Op != OpAdd16 { 5915 break 5916 } 5917 v_1_0 := v_1.Args[0] 5918 if v_1_0.Op != OpConst16 { 5919 break 5920 } 5921 if v_1_0.Type != t { 5922 break 5923 } 5924 d := v_1_0.AuxInt 5925 x := v_1.Args[1] 5926 v.reset(OpNeq16) 5927 v0 := b.NewValue0(v.Line, OpConst16, t) 5928 v0.AuxInt = int64(int16(c - d)) 5929 v.AddArg(v0) 5930 v.AddArg(x) 5931 return true 5932 } 5933 // match: (Neq16 x (Const16 <t> [c])) 5934 // cond: x.Op != OpConst16 5935 // result: (Neq16 (Const16 <t> [c]) x) 5936 for { 5937 x := v.Args[0] 5938 v_1 := v.Args[1] 5939 if v_1.Op != OpConst16 { 5940 break 5941 } 5942 t := v_1.Type 5943 c := v_1.AuxInt 5944 if !(x.Op != OpConst16) { 5945 break 5946 } 5947 
v.reset(OpNeq16) 5948 v0 := b.NewValue0(v.Line, OpConst16, t) 5949 v0.AuxInt = c 5950 v.AddArg(v0) 5951 v.AddArg(x) 5952 return true 5953 } 5954 // match: (Neq16 (Const16 [c]) (Const16 [d])) 5955 // cond: 5956 // result: (ConstBool [b2i(c != d)]) 5957 for { 5958 v_0 := v.Args[0] 5959 if v_0.Op != OpConst16 { 5960 break 5961 } 5962 c := v_0.AuxInt 5963 v_1 := v.Args[1] 5964 if v_1.Op != OpConst16 { 5965 break 5966 } 5967 d := v_1.AuxInt 5968 v.reset(OpConstBool) 5969 v.AuxInt = b2i(c != d) 5970 return true 5971 } 5972 return false 5973 } 5974 func rewriteValuegeneric_OpNeq32(v *Value, config *Config) bool { 5975 b := v.Block 5976 _ = b 5977 // match: (Neq32 x x) 5978 // cond: 5979 // result: (ConstBool [0]) 5980 for { 5981 x := v.Args[0] 5982 if x != v.Args[1] { 5983 break 5984 } 5985 v.reset(OpConstBool) 5986 v.AuxInt = 0 5987 return true 5988 } 5989 // match: (Neq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x)) 5990 // cond: 5991 // result: (Neq32 (Const32 <t> [int64(int32(c-d))]) x) 5992 for { 5993 v_0 := v.Args[0] 5994 if v_0.Op != OpConst32 { 5995 break 5996 } 5997 t := v_0.Type 5998 c := v_0.AuxInt 5999 v_1 := v.Args[1] 6000 if v_1.Op != OpAdd32 { 6001 break 6002 } 6003 v_1_0 := v_1.Args[0] 6004 if v_1_0.Op != OpConst32 { 6005 break 6006 } 6007 if v_1_0.Type != t { 6008 break 6009 } 6010 d := v_1_0.AuxInt 6011 x := v_1.Args[1] 6012 v.reset(OpNeq32) 6013 v0 := b.NewValue0(v.Line, OpConst32, t) 6014 v0.AuxInt = int64(int32(c - d)) 6015 v.AddArg(v0) 6016 v.AddArg(x) 6017 return true 6018 } 6019 // match: (Neq32 x (Const32 <t> [c])) 6020 // cond: x.Op != OpConst32 6021 // result: (Neq32 (Const32 <t> [c]) x) 6022 for { 6023 x := v.Args[0] 6024 v_1 := v.Args[1] 6025 if v_1.Op != OpConst32 { 6026 break 6027 } 6028 t := v_1.Type 6029 c := v_1.AuxInt 6030 if !(x.Op != OpConst32) { 6031 break 6032 } 6033 v.reset(OpNeq32) 6034 v0 := b.NewValue0(v.Line, OpConst32, t) 6035 v0.AuxInt = c 6036 v.AddArg(v0) 6037 v.AddArg(x) 6038 return true 6039 } 6040 // match: (Neq32 
(Const32 [c]) (Const32 [d])) 6041 // cond: 6042 // result: (ConstBool [b2i(c != d)]) 6043 for { 6044 v_0 := v.Args[0] 6045 if v_0.Op != OpConst32 { 6046 break 6047 } 6048 c := v_0.AuxInt 6049 v_1 := v.Args[1] 6050 if v_1.Op != OpConst32 { 6051 break 6052 } 6053 d := v_1.AuxInt 6054 v.reset(OpConstBool) 6055 v.AuxInt = b2i(c != d) 6056 return true 6057 } 6058 return false 6059 } 6060 func rewriteValuegeneric_OpNeq64(v *Value, config *Config) bool { 6061 b := v.Block 6062 _ = b 6063 // match: (Neq64 x x) 6064 // cond: 6065 // result: (ConstBool [0]) 6066 for { 6067 x := v.Args[0] 6068 if x != v.Args[1] { 6069 break 6070 } 6071 v.reset(OpConstBool) 6072 v.AuxInt = 0 6073 return true 6074 } 6075 // match: (Neq64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x)) 6076 // cond: 6077 // result: (Neq64 (Const64 <t> [c-d]) x) 6078 for { 6079 v_0 := v.Args[0] 6080 if v_0.Op != OpConst64 { 6081 break 6082 } 6083 t := v_0.Type 6084 c := v_0.AuxInt 6085 v_1 := v.Args[1] 6086 if v_1.Op != OpAdd64 { 6087 break 6088 } 6089 v_1_0 := v_1.Args[0] 6090 if v_1_0.Op != OpConst64 { 6091 break 6092 } 6093 if v_1_0.Type != t { 6094 break 6095 } 6096 d := v_1_0.AuxInt 6097 x := v_1.Args[1] 6098 v.reset(OpNeq64) 6099 v0 := b.NewValue0(v.Line, OpConst64, t) 6100 v0.AuxInt = c - d 6101 v.AddArg(v0) 6102 v.AddArg(x) 6103 return true 6104 } 6105 // match: (Neq64 x (Const64 <t> [c])) 6106 // cond: x.Op != OpConst64 6107 // result: (Neq64 (Const64 <t> [c]) x) 6108 for { 6109 x := v.Args[0] 6110 v_1 := v.Args[1] 6111 if v_1.Op != OpConst64 { 6112 break 6113 } 6114 t := v_1.Type 6115 c := v_1.AuxInt 6116 if !(x.Op != OpConst64) { 6117 break 6118 } 6119 v.reset(OpNeq64) 6120 v0 := b.NewValue0(v.Line, OpConst64, t) 6121 v0.AuxInt = c 6122 v.AddArg(v0) 6123 v.AddArg(x) 6124 return true 6125 } 6126 // match: (Neq64 (Const64 [c]) (Const64 [d])) 6127 // cond: 6128 // result: (ConstBool [b2i(c != d)]) 6129 for { 6130 v_0 := v.Args[0] 6131 if v_0.Op != OpConst64 { 6132 break 6133 } 6134 c := v_0.AuxInt 6135 v_1 
:= v.Args[1] 6136 if v_1.Op != OpConst64 { 6137 break 6138 } 6139 d := v_1.AuxInt 6140 v.reset(OpConstBool) 6141 v.AuxInt = b2i(c != d) 6142 return true 6143 } 6144 return false 6145 } 6146 func rewriteValuegeneric_OpNeq8(v *Value, config *Config) bool { 6147 b := v.Block 6148 _ = b 6149 // match: (Neq8 x x) 6150 // cond: 6151 // result: (ConstBool [0]) 6152 for { 6153 x := v.Args[0] 6154 if x != v.Args[1] { 6155 break 6156 } 6157 v.reset(OpConstBool) 6158 v.AuxInt = 0 6159 return true 6160 } 6161 // match: (Neq8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x)) 6162 // cond: 6163 // result: (Neq8 (Const8 <t> [int64(int8(c-d))]) x) 6164 for { 6165 v_0 := v.Args[0] 6166 if v_0.Op != OpConst8 { 6167 break 6168 } 6169 t := v_0.Type 6170 c := v_0.AuxInt 6171 v_1 := v.Args[1] 6172 if v_1.Op != OpAdd8 { 6173 break 6174 } 6175 v_1_0 := v_1.Args[0] 6176 if v_1_0.Op != OpConst8 { 6177 break 6178 } 6179 if v_1_0.Type != t { 6180 break 6181 } 6182 d := v_1_0.AuxInt 6183 x := v_1.Args[1] 6184 v.reset(OpNeq8) 6185 v0 := b.NewValue0(v.Line, OpConst8, t) 6186 v0.AuxInt = int64(int8(c - d)) 6187 v.AddArg(v0) 6188 v.AddArg(x) 6189 return true 6190 } 6191 // match: (Neq8 x (Const8 <t> [c])) 6192 // cond: x.Op != OpConst8 6193 // result: (Neq8 (Const8 <t> [c]) x) 6194 for { 6195 x := v.Args[0] 6196 v_1 := v.Args[1] 6197 if v_1.Op != OpConst8 { 6198 break 6199 } 6200 t := v_1.Type 6201 c := v_1.AuxInt 6202 if !(x.Op != OpConst8) { 6203 break 6204 } 6205 v.reset(OpNeq8) 6206 v0 := b.NewValue0(v.Line, OpConst8, t) 6207 v0.AuxInt = c 6208 v.AddArg(v0) 6209 v.AddArg(x) 6210 return true 6211 } 6212 // match: (Neq8 (Const8 [c]) (Const8 [d])) 6213 // cond: 6214 // result: (ConstBool [b2i(c != d)]) 6215 for { 6216 v_0 := v.Args[0] 6217 if v_0.Op != OpConst8 { 6218 break 6219 } 6220 c := v_0.AuxInt 6221 v_1 := v.Args[1] 6222 if v_1.Op != OpConst8 { 6223 break 6224 } 6225 d := v_1.AuxInt 6226 v.reset(OpConstBool) 6227 v.AuxInt = b2i(c != d) 6228 return true 6229 } 6230 return false 6231 } 6232 func 
rewriteValuegeneric_OpNeqB(v *Value, config *Config) bool { 6233 b := v.Block 6234 _ = b 6235 // match: (NeqB (ConstBool [c]) (ConstBool [d])) 6236 // cond: 6237 // result: (ConstBool [b2i(c != d)]) 6238 for { 6239 v_0 := v.Args[0] 6240 if v_0.Op != OpConstBool { 6241 break 6242 } 6243 c := v_0.AuxInt 6244 v_1 := v.Args[1] 6245 if v_1.Op != OpConstBool { 6246 break 6247 } 6248 d := v_1.AuxInt 6249 v.reset(OpConstBool) 6250 v.AuxInt = b2i(c != d) 6251 return true 6252 } 6253 // match: (NeqB (ConstBool [0]) x) 6254 // cond: 6255 // result: x 6256 for { 6257 v_0 := v.Args[0] 6258 if v_0.Op != OpConstBool { 6259 break 6260 } 6261 if v_0.AuxInt != 0 { 6262 break 6263 } 6264 x := v.Args[1] 6265 v.reset(OpCopy) 6266 v.Type = x.Type 6267 v.AddArg(x) 6268 return true 6269 } 6270 // match: (NeqB (ConstBool [1]) x) 6271 // cond: 6272 // result: (Not x) 6273 for { 6274 v_0 := v.Args[0] 6275 if v_0.Op != OpConstBool { 6276 break 6277 } 6278 if v_0.AuxInt != 1 { 6279 break 6280 } 6281 x := v.Args[1] 6282 v.reset(OpNot) 6283 v.AddArg(x) 6284 return true 6285 } 6286 return false 6287 } 6288 func rewriteValuegeneric_OpNeqInter(v *Value, config *Config) bool { 6289 b := v.Block 6290 _ = b 6291 // match: (NeqInter x y) 6292 // cond: 6293 // result: (NeqPtr (ITab x) (ITab y)) 6294 for { 6295 x := v.Args[0] 6296 y := v.Args[1] 6297 v.reset(OpNeqPtr) 6298 v0 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 6299 v0.AddArg(x) 6300 v.AddArg(v0) 6301 v1 := b.NewValue0(v.Line, OpITab, config.fe.TypeBytePtr()) 6302 v1.AddArg(y) 6303 v.AddArg(v1) 6304 return true 6305 } 6306 } 6307 func rewriteValuegeneric_OpNeqPtr(v *Value, config *Config) bool { 6308 b := v.Block 6309 _ = b 6310 // match: (NeqPtr p (ConstNil)) 6311 // cond: 6312 // result: (IsNonNil p) 6313 for { 6314 p := v.Args[0] 6315 v_1 := v.Args[1] 6316 if v_1.Op != OpConstNil { 6317 break 6318 } 6319 v.reset(OpIsNonNil) 6320 v.AddArg(p) 6321 return true 6322 } 6323 // match: (NeqPtr (ConstNil) p) 6324 // cond: 6325 // result: 
(IsNonNil p) 6326 for { 6327 v_0 := v.Args[0] 6328 if v_0.Op != OpConstNil { 6329 break 6330 } 6331 p := v.Args[1] 6332 v.reset(OpIsNonNil) 6333 v.AddArg(p) 6334 return true 6335 } 6336 return false 6337 } 6338 func rewriteValuegeneric_OpNeqSlice(v *Value, config *Config) bool { 6339 b := v.Block 6340 _ = b 6341 // match: (NeqSlice x y) 6342 // cond: 6343 // result: (NeqPtr (SlicePtr x) (SlicePtr y)) 6344 for { 6345 x := v.Args[0] 6346 y := v.Args[1] 6347 v.reset(OpNeqPtr) 6348 v0 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6349 v0.AddArg(x) 6350 v.AddArg(v0) 6351 v1 := b.NewValue0(v.Line, OpSlicePtr, config.fe.TypeBytePtr()) 6352 v1.AddArg(y) 6353 v.AddArg(v1) 6354 return true 6355 } 6356 } 6357 func rewriteValuegeneric_OpOffPtr(v *Value, config *Config) bool { 6358 b := v.Block 6359 _ = b 6360 // match: (OffPtr (OffPtr p [b]) [a]) 6361 // cond: 6362 // result: (OffPtr p [a+b]) 6363 for { 6364 a := v.AuxInt 6365 v_0 := v.Args[0] 6366 if v_0.Op != OpOffPtr { 6367 break 6368 } 6369 b := v_0.AuxInt 6370 p := v_0.Args[0] 6371 v.reset(OpOffPtr) 6372 v.AuxInt = a + b 6373 v.AddArg(p) 6374 return true 6375 } 6376 // match: (OffPtr p [0]) 6377 // cond: v.Type.Compare(p.Type) == CMPeq 6378 // result: p 6379 for { 6380 if v.AuxInt != 0 { 6381 break 6382 } 6383 p := v.Args[0] 6384 if !(v.Type.Compare(p.Type) == CMPeq) { 6385 break 6386 } 6387 v.reset(OpCopy) 6388 v.Type = p.Type 6389 v.AddArg(p) 6390 return true 6391 } 6392 return false 6393 } 6394 func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool { 6395 b := v.Block 6396 _ = b 6397 // match: (Or16 x (Const16 <t> [c])) 6398 // cond: x.Op != OpConst16 6399 // result: (Or16 (Const16 <t> [c]) x) 6400 for { 6401 x := v.Args[0] 6402 v_1 := v.Args[1] 6403 if v_1.Op != OpConst16 { 6404 break 6405 } 6406 t := v_1.Type 6407 c := v_1.AuxInt 6408 if !(x.Op != OpConst16) { 6409 break 6410 } 6411 v.reset(OpOr16) 6412 v0 := b.NewValue0(v.Line, OpConst16, t) 6413 v0.AuxInt = c 6414 v.AddArg(v0) 6415 v.AddArg(x) 
6416 return true 6417 } 6418 // match: (Or16 x x) 6419 // cond: 6420 // result: x 6421 for { 6422 x := v.Args[0] 6423 if x != v.Args[1] { 6424 break 6425 } 6426 v.reset(OpCopy) 6427 v.Type = x.Type 6428 v.AddArg(x) 6429 return true 6430 } 6431 // match: (Or16 (Const16 [0]) x) 6432 // cond: 6433 // result: x 6434 for { 6435 v_0 := v.Args[0] 6436 if v_0.Op != OpConst16 { 6437 break 6438 } 6439 if v_0.AuxInt != 0 { 6440 break 6441 } 6442 x := v.Args[1] 6443 v.reset(OpCopy) 6444 v.Type = x.Type 6445 v.AddArg(x) 6446 return true 6447 } 6448 // match: (Or16 (Const16 [-1]) _) 6449 // cond: 6450 // result: (Const16 [-1]) 6451 for { 6452 v_0 := v.Args[0] 6453 if v_0.Op != OpConst16 { 6454 break 6455 } 6456 if v_0.AuxInt != -1 { 6457 break 6458 } 6459 v.reset(OpConst16) 6460 v.AuxInt = -1 6461 return true 6462 } 6463 // match: (Or16 x (Or16 x y)) 6464 // cond: 6465 // result: (Or16 x y) 6466 for { 6467 x := v.Args[0] 6468 v_1 := v.Args[1] 6469 if v_1.Op != OpOr16 { 6470 break 6471 } 6472 if x != v_1.Args[0] { 6473 break 6474 } 6475 y := v_1.Args[1] 6476 v.reset(OpOr16) 6477 v.AddArg(x) 6478 v.AddArg(y) 6479 return true 6480 } 6481 // match: (Or16 x (Or16 y x)) 6482 // cond: 6483 // result: (Or16 x y) 6484 for { 6485 x := v.Args[0] 6486 v_1 := v.Args[1] 6487 if v_1.Op != OpOr16 { 6488 break 6489 } 6490 y := v_1.Args[0] 6491 if x != v_1.Args[1] { 6492 break 6493 } 6494 v.reset(OpOr16) 6495 v.AddArg(x) 6496 v.AddArg(y) 6497 return true 6498 } 6499 // match: (Or16 (Or16 x y) x) 6500 // cond: 6501 // result: (Or16 x y) 6502 for { 6503 v_0 := v.Args[0] 6504 if v_0.Op != OpOr16 { 6505 break 6506 } 6507 x := v_0.Args[0] 6508 y := v_0.Args[1] 6509 if x != v.Args[1] { 6510 break 6511 } 6512 v.reset(OpOr16) 6513 v.AddArg(x) 6514 v.AddArg(y) 6515 return true 6516 } 6517 // match: (Or16 (Or16 x y) y) 6518 // cond: 6519 // result: (Or16 x y) 6520 for { 6521 v_0 := v.Args[0] 6522 if v_0.Op != OpOr16 { 6523 break 6524 } 6525 x := v_0.Args[0] 6526 y := v_0.Args[1] 6527 if y != v.Args[1] { 
6528 break 6529 } 6530 v.reset(OpOr16) 6531 v.AddArg(x) 6532 v.AddArg(y) 6533 return true 6534 } 6535 return false 6536 } 6537 func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool { 6538 b := v.Block 6539 _ = b 6540 // match: (Or32 x (Const32 <t> [c])) 6541 // cond: x.Op != OpConst32 6542 // result: (Or32 (Const32 <t> [c]) x) 6543 for { 6544 x := v.Args[0] 6545 v_1 := v.Args[1] 6546 if v_1.Op != OpConst32 { 6547 break 6548 } 6549 t := v_1.Type 6550 c := v_1.AuxInt 6551 if !(x.Op != OpConst32) { 6552 break 6553 } 6554 v.reset(OpOr32) 6555 v0 := b.NewValue0(v.Line, OpConst32, t) 6556 v0.AuxInt = c 6557 v.AddArg(v0) 6558 v.AddArg(x) 6559 return true 6560 } 6561 // match: (Or32 x x) 6562 // cond: 6563 // result: x 6564 for { 6565 x := v.Args[0] 6566 if x != v.Args[1] { 6567 break 6568 } 6569 v.reset(OpCopy) 6570 v.Type = x.Type 6571 v.AddArg(x) 6572 return true 6573 } 6574 // match: (Or32 (Const32 [0]) x) 6575 // cond: 6576 // result: x 6577 for { 6578 v_0 := v.Args[0] 6579 if v_0.Op != OpConst32 { 6580 break 6581 } 6582 if v_0.AuxInt != 0 { 6583 break 6584 } 6585 x := v.Args[1] 6586 v.reset(OpCopy) 6587 v.Type = x.Type 6588 v.AddArg(x) 6589 return true 6590 } 6591 // match: (Or32 (Const32 [-1]) _) 6592 // cond: 6593 // result: (Const32 [-1]) 6594 for { 6595 v_0 := v.Args[0] 6596 if v_0.Op != OpConst32 { 6597 break 6598 } 6599 if v_0.AuxInt != -1 { 6600 break 6601 } 6602 v.reset(OpConst32) 6603 v.AuxInt = -1 6604 return true 6605 } 6606 // match: (Or32 x (Or32 x y)) 6607 // cond: 6608 // result: (Or32 x y) 6609 for { 6610 x := v.Args[0] 6611 v_1 := v.Args[1] 6612 if v_1.Op != OpOr32 { 6613 break 6614 } 6615 if x != v_1.Args[0] { 6616 break 6617 } 6618 y := v_1.Args[1] 6619 v.reset(OpOr32) 6620 v.AddArg(x) 6621 v.AddArg(y) 6622 return true 6623 } 6624 // match: (Or32 x (Or32 y x)) 6625 // cond: 6626 // result: (Or32 x y) 6627 for { 6628 x := v.Args[0] 6629 v_1 := v.Args[1] 6630 if v_1.Op != OpOr32 { 6631 break 6632 } 6633 y := v_1.Args[0] 6634 if x != 
v_1.Args[1] { 6635 break 6636 } 6637 v.reset(OpOr32) 6638 v.AddArg(x) 6639 v.AddArg(y) 6640 return true 6641 } 6642 // match: (Or32 (Or32 x y) x) 6643 // cond: 6644 // result: (Or32 x y) 6645 for { 6646 v_0 := v.Args[0] 6647 if v_0.Op != OpOr32 { 6648 break 6649 } 6650 x := v_0.Args[0] 6651 y := v_0.Args[1] 6652 if x != v.Args[1] { 6653 break 6654 } 6655 v.reset(OpOr32) 6656 v.AddArg(x) 6657 v.AddArg(y) 6658 return true 6659 } 6660 // match: (Or32 (Or32 x y) y) 6661 // cond: 6662 // result: (Or32 x y) 6663 for { 6664 v_0 := v.Args[0] 6665 if v_0.Op != OpOr32 { 6666 break 6667 } 6668 x := v_0.Args[0] 6669 y := v_0.Args[1] 6670 if y != v.Args[1] { 6671 break 6672 } 6673 v.reset(OpOr32) 6674 v.AddArg(x) 6675 v.AddArg(y) 6676 return true 6677 } 6678 return false 6679 } 6680 func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool { 6681 b := v.Block 6682 _ = b 6683 // match: (Or64 x (Const64 <t> [c])) 6684 // cond: x.Op != OpConst64 6685 // result: (Or64 (Const64 <t> [c]) x) 6686 for { 6687 x := v.Args[0] 6688 v_1 := v.Args[1] 6689 if v_1.Op != OpConst64 { 6690 break 6691 } 6692 t := v_1.Type 6693 c := v_1.AuxInt 6694 if !(x.Op != OpConst64) { 6695 break 6696 } 6697 v.reset(OpOr64) 6698 v0 := b.NewValue0(v.Line, OpConst64, t) 6699 v0.AuxInt = c 6700 v.AddArg(v0) 6701 v.AddArg(x) 6702 return true 6703 } 6704 // match: (Or64 x x) 6705 // cond: 6706 // result: x 6707 for { 6708 x := v.Args[0] 6709 if x != v.Args[1] { 6710 break 6711 } 6712 v.reset(OpCopy) 6713 v.Type = x.Type 6714 v.AddArg(x) 6715 return true 6716 } 6717 // match: (Or64 (Const64 [0]) x) 6718 // cond: 6719 // result: x 6720 for { 6721 v_0 := v.Args[0] 6722 if v_0.Op != OpConst64 { 6723 break 6724 } 6725 if v_0.AuxInt != 0 { 6726 break 6727 } 6728 x := v.Args[1] 6729 v.reset(OpCopy) 6730 v.Type = x.Type 6731 v.AddArg(x) 6732 return true 6733 } 6734 // match: (Or64 (Const64 [-1]) _) 6735 // cond: 6736 // result: (Const64 [-1]) 6737 for { 6738 v_0 := v.Args[0] 6739 if v_0.Op != OpConst64 { 6740 break 
6741 } 6742 if v_0.AuxInt != -1 { 6743 break 6744 } 6745 v.reset(OpConst64) 6746 v.AuxInt = -1 6747 return true 6748 } 6749 // match: (Or64 x (Or64 x y)) 6750 // cond: 6751 // result: (Or64 x y) 6752 for { 6753 x := v.Args[0] 6754 v_1 := v.Args[1] 6755 if v_1.Op != OpOr64 { 6756 break 6757 } 6758 if x != v_1.Args[0] { 6759 break 6760 } 6761 y := v_1.Args[1] 6762 v.reset(OpOr64) 6763 v.AddArg(x) 6764 v.AddArg(y) 6765 return true 6766 } 6767 // match: (Or64 x (Or64 y x)) 6768 // cond: 6769 // result: (Or64 x y) 6770 for { 6771 x := v.Args[0] 6772 v_1 := v.Args[1] 6773 if v_1.Op != OpOr64 { 6774 break 6775 } 6776 y := v_1.Args[0] 6777 if x != v_1.Args[1] { 6778 break 6779 } 6780 v.reset(OpOr64) 6781 v.AddArg(x) 6782 v.AddArg(y) 6783 return true 6784 } 6785 // match: (Or64 (Or64 x y) x) 6786 // cond: 6787 // result: (Or64 x y) 6788 for { 6789 v_0 := v.Args[0] 6790 if v_0.Op != OpOr64 { 6791 break 6792 } 6793 x := v_0.Args[0] 6794 y := v_0.Args[1] 6795 if x != v.Args[1] { 6796 break 6797 } 6798 v.reset(OpOr64) 6799 v.AddArg(x) 6800 v.AddArg(y) 6801 return true 6802 } 6803 // match: (Or64 (Or64 x y) y) 6804 // cond: 6805 // result: (Or64 x y) 6806 for { 6807 v_0 := v.Args[0] 6808 if v_0.Op != OpOr64 { 6809 break 6810 } 6811 x := v_0.Args[0] 6812 y := v_0.Args[1] 6813 if y != v.Args[1] { 6814 break 6815 } 6816 v.reset(OpOr64) 6817 v.AddArg(x) 6818 v.AddArg(y) 6819 return true 6820 } 6821 return false 6822 } 6823 func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool { 6824 b := v.Block 6825 _ = b 6826 // match: (Or8 x (Const8 <t> [c])) 6827 // cond: x.Op != OpConst8 6828 // result: (Or8 (Const8 <t> [c]) x) 6829 for { 6830 x := v.Args[0] 6831 v_1 := v.Args[1] 6832 if v_1.Op != OpConst8 { 6833 break 6834 } 6835 t := v_1.Type 6836 c := v_1.AuxInt 6837 if !(x.Op != OpConst8) { 6838 break 6839 } 6840 v.reset(OpOr8) 6841 v0 := b.NewValue0(v.Line, OpConst8, t) 6842 v0.AuxInt = c 6843 v.AddArg(v0) 6844 v.AddArg(x) 6845 return true 6846 } 6847 // match: (Or8 x x) 6848 // 
cond: 6849 // result: x 6850 for { 6851 x := v.Args[0] 6852 if x != v.Args[1] { 6853 break 6854 } 6855 v.reset(OpCopy) 6856 v.Type = x.Type 6857 v.AddArg(x) 6858 return true 6859 } 6860 // match: (Or8 (Const8 [0]) x) 6861 // cond: 6862 // result: x 6863 for { 6864 v_0 := v.Args[0] 6865 if v_0.Op != OpConst8 { 6866 break 6867 } 6868 if v_0.AuxInt != 0 { 6869 break 6870 } 6871 x := v.Args[1] 6872 v.reset(OpCopy) 6873 v.Type = x.Type 6874 v.AddArg(x) 6875 return true 6876 } 6877 // match: (Or8 (Const8 [-1]) _) 6878 // cond: 6879 // result: (Const8 [-1]) 6880 for { 6881 v_0 := v.Args[0] 6882 if v_0.Op != OpConst8 { 6883 break 6884 } 6885 if v_0.AuxInt != -1 { 6886 break 6887 } 6888 v.reset(OpConst8) 6889 v.AuxInt = -1 6890 return true 6891 } 6892 // match: (Or8 x (Or8 x y)) 6893 // cond: 6894 // result: (Or8 x y) 6895 for { 6896 x := v.Args[0] 6897 v_1 := v.Args[1] 6898 if v_1.Op != OpOr8 { 6899 break 6900 } 6901 if x != v_1.Args[0] { 6902 break 6903 } 6904 y := v_1.Args[1] 6905 v.reset(OpOr8) 6906 v.AddArg(x) 6907 v.AddArg(y) 6908 return true 6909 } 6910 // match: (Or8 x (Or8 y x)) 6911 // cond: 6912 // result: (Or8 x y) 6913 for { 6914 x := v.Args[0] 6915 v_1 := v.Args[1] 6916 if v_1.Op != OpOr8 { 6917 break 6918 } 6919 y := v_1.Args[0] 6920 if x != v_1.Args[1] { 6921 break 6922 } 6923 v.reset(OpOr8) 6924 v.AddArg(x) 6925 v.AddArg(y) 6926 return true 6927 } 6928 // match: (Or8 (Or8 x y) x) 6929 // cond: 6930 // result: (Or8 x y) 6931 for { 6932 v_0 := v.Args[0] 6933 if v_0.Op != OpOr8 { 6934 break 6935 } 6936 x := v_0.Args[0] 6937 y := v_0.Args[1] 6938 if x != v.Args[1] { 6939 break 6940 } 6941 v.reset(OpOr8) 6942 v.AddArg(x) 6943 v.AddArg(y) 6944 return true 6945 } 6946 // match: (Or8 (Or8 x y) y) 6947 // cond: 6948 // result: (Or8 x y) 6949 for { 6950 v_0 := v.Args[0] 6951 if v_0.Op != OpOr8 { 6952 break 6953 } 6954 x := v_0.Args[0] 6955 y := v_0.Args[1] 6956 if y != v.Args[1] { 6957 break 6958 } 6959 v.reset(OpOr8) 6960 v.AddArg(x) 6961 v.AddArg(y) 6962 return 
true 6963 } 6964 return false 6965 } 6966 func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool { 6967 b := v.Block 6968 _ = b 6969 // match: (Phi (Const8 [c]) (Const8 [c])) 6970 // cond: 6971 // result: (Const8 [c]) 6972 for { 6973 v_0 := v.Args[0] 6974 if v_0.Op != OpConst8 { 6975 break 6976 } 6977 c := v_0.AuxInt 6978 v_1 := v.Args[1] 6979 if v_1.Op != OpConst8 { 6980 break 6981 } 6982 if v_1.AuxInt != c { 6983 break 6984 } 6985 if len(v.Args) != 2 { 6986 break 6987 } 6988 v.reset(OpConst8) 6989 v.AuxInt = c 6990 return true 6991 } 6992 // match: (Phi (Const16 [c]) (Const16 [c])) 6993 // cond: 6994 // result: (Const16 [c]) 6995 for { 6996 v_0 := v.Args[0] 6997 if v_0.Op != OpConst16 { 6998 break 6999 } 7000 c := v_0.AuxInt 7001 v_1 := v.Args[1] 7002 if v_1.Op != OpConst16 { 7003 break 7004 } 7005 if v_1.AuxInt != c { 7006 break 7007 } 7008 if len(v.Args) != 2 { 7009 break 7010 } 7011 v.reset(OpConst16) 7012 v.AuxInt = c 7013 return true 7014 } 7015 // match: (Phi (Const32 [c]) (Const32 [c])) 7016 // cond: 7017 // result: (Const32 [c]) 7018 for { 7019 v_0 := v.Args[0] 7020 if v_0.Op != OpConst32 { 7021 break 7022 } 7023 c := v_0.AuxInt 7024 v_1 := v.Args[1] 7025 if v_1.Op != OpConst32 { 7026 break 7027 } 7028 if v_1.AuxInt != c { 7029 break 7030 } 7031 if len(v.Args) != 2 { 7032 break 7033 } 7034 v.reset(OpConst32) 7035 v.AuxInt = c 7036 return true 7037 } 7038 // match: (Phi (Const64 [c]) (Const64 [c])) 7039 // cond: 7040 // result: (Const64 [c]) 7041 for { 7042 v_0 := v.Args[0] 7043 if v_0.Op != OpConst64 { 7044 break 7045 } 7046 c := v_0.AuxInt 7047 v_1 := v.Args[1] 7048 if v_1.Op != OpConst64 { 7049 break 7050 } 7051 if v_1.AuxInt != c { 7052 break 7053 } 7054 if len(v.Args) != 2 { 7055 break 7056 } 7057 v.reset(OpConst64) 7058 v.AuxInt = c 7059 return true 7060 } 7061 return false 7062 } 7063 func rewriteValuegeneric_OpPtrIndex(v *Value, config *Config) bool { 7064 b := v.Block 7065 _ = b 7066 // match: (PtrIndex <t> ptr idx) 7067 // cond: 
config.PtrSize == 4 7068 // result: (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.ElemType().Size()]))) 7069 for { 7070 t := v.Type 7071 ptr := v.Args[0] 7072 idx := v.Args[1] 7073 if !(config.PtrSize == 4) { 7074 break 7075 } 7076 v.reset(OpAddPtr) 7077 v.AddArg(ptr) 7078 v0 := b.NewValue0(v.Line, OpMul32, config.fe.TypeInt()) 7079 v0.AddArg(idx) 7080 v1 := b.NewValue0(v.Line, OpConst32, config.fe.TypeInt()) 7081 v1.AuxInt = t.ElemType().Size() 7082 v0.AddArg(v1) 7083 v.AddArg(v0) 7084 return true 7085 } 7086 // match: (PtrIndex <t> ptr idx) 7087 // cond: config.PtrSize == 8 7088 // result: (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.ElemType().Size()]))) 7089 for { 7090 t := v.Type 7091 ptr := v.Args[0] 7092 idx := v.Args[1] 7093 if !(config.PtrSize == 8) { 7094 break 7095 } 7096 v.reset(OpAddPtr) 7097 v.AddArg(ptr) 7098 v0 := b.NewValue0(v.Line, OpMul64, config.fe.TypeInt()) 7099 v0.AddArg(idx) 7100 v1 := b.NewValue0(v.Line, OpConst64, config.fe.TypeInt()) 7101 v1.AuxInt = t.ElemType().Size() 7102 v0.AddArg(v1) 7103 v.AddArg(v0) 7104 return true 7105 } 7106 return false 7107 } 7108 func rewriteValuegeneric_OpRsh16Ux16(v *Value, config *Config) bool { 7109 b := v.Block 7110 _ = b 7111 // match: (Rsh16Ux16 <t> x (Const16 [c])) 7112 // cond: 7113 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint16(c))])) 7114 for { 7115 t := v.Type 7116 x := v.Args[0] 7117 v_1 := v.Args[1] 7118 if v_1.Op != OpConst16 { 7119 break 7120 } 7121 c := v_1.AuxInt 7122 v.reset(OpRsh16Ux64) 7123 v.AddArg(x) 7124 v0 := b.NewValue0(v.Line, OpConst64, t) 7125 v0.AuxInt = int64(uint16(c)) 7126 v.AddArg(v0) 7127 return true 7128 } 7129 // match: (Rsh16Ux16 (Const16 [0]) _) 7130 // cond: 7131 // result: (Const16 [0]) 7132 for { 7133 v_0 := v.Args[0] 7134 if v_0.Op != OpConst16 { 7135 break 7136 } 7137 if v_0.AuxInt != 0 { 7138 break 7139 } 7140 v.reset(OpConst16) 7141 v.AuxInt = 0 7142 return true 7143 } 7144 return false 7145 } 
7146 func rewriteValuegeneric_OpRsh16Ux32(v *Value, config *Config) bool { 7147 b := v.Block 7148 _ = b 7149 // match: (Rsh16Ux32 <t> x (Const32 [c])) 7150 // cond: 7151 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint32(c))])) 7152 for { 7153 t := v.Type 7154 x := v.Args[0] 7155 v_1 := v.Args[1] 7156 if v_1.Op != OpConst32 { 7157 break 7158 } 7159 c := v_1.AuxInt 7160 v.reset(OpRsh16Ux64) 7161 v.AddArg(x) 7162 v0 := b.NewValue0(v.Line, OpConst64, t) 7163 v0.AuxInt = int64(uint32(c)) 7164 v.AddArg(v0) 7165 return true 7166 } 7167 // match: (Rsh16Ux32 (Const16 [0]) _) 7168 // cond: 7169 // result: (Const16 [0]) 7170 for { 7171 v_0 := v.Args[0] 7172 if v_0.Op != OpConst16 { 7173 break 7174 } 7175 if v_0.AuxInt != 0 { 7176 break 7177 } 7178 v.reset(OpConst16) 7179 v.AuxInt = 0 7180 return true 7181 } 7182 return false 7183 } 7184 func rewriteValuegeneric_OpRsh16Ux64(v *Value, config *Config) bool { 7185 b := v.Block 7186 _ = b 7187 // match: (Rsh16Ux64 (Const16 [c]) (Const64 [d])) 7188 // cond: 7189 // result: (Const16 [int64(int16(uint16(c) >> uint64(d)))]) 7190 for { 7191 v_0 := v.Args[0] 7192 if v_0.Op != OpConst16 { 7193 break 7194 } 7195 c := v_0.AuxInt 7196 v_1 := v.Args[1] 7197 if v_1.Op != OpConst64 { 7198 break 7199 } 7200 d := v_1.AuxInt 7201 v.reset(OpConst16) 7202 v.AuxInt = int64(int16(uint16(c) >> uint64(d))) 7203 return true 7204 } 7205 // match: (Rsh16Ux64 x (Const64 [0])) 7206 // cond: 7207 // result: x 7208 for { 7209 x := v.Args[0] 7210 v_1 := v.Args[1] 7211 if v_1.Op != OpConst64 { 7212 break 7213 } 7214 if v_1.AuxInt != 0 { 7215 break 7216 } 7217 v.reset(OpCopy) 7218 v.Type = x.Type 7219 v.AddArg(x) 7220 return true 7221 } 7222 // match: (Rsh16Ux64 (Const16 [0]) _) 7223 // cond: 7224 // result: (Const16 [0]) 7225 for { 7226 v_0 := v.Args[0] 7227 if v_0.Op != OpConst16 { 7228 break 7229 } 7230 if v_0.AuxInt != 0 { 7231 break 7232 } 7233 v.reset(OpConst16) 7234 v.AuxInt = 0 7235 return true 7236 } 7237 // match: (Rsh16Ux64 _ (Const64 [c])) 7238 // 
cond: uint64(c) >= 16 7239 // result: (Const16 [0]) 7240 for { 7241 v_1 := v.Args[1] 7242 if v_1.Op != OpConst64 { 7243 break 7244 } 7245 c := v_1.AuxInt 7246 if !(uint64(c) >= 16) { 7247 break 7248 } 7249 v.reset(OpConst16) 7250 v.AuxInt = 0 7251 return true 7252 } 7253 // match: (Rsh16Ux64 <t> (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) 7254 // cond: !uaddOvf(c,d) 7255 // result: (Rsh16Ux64 x (Const64 <t> [c+d])) 7256 for { 7257 t := v.Type 7258 v_0 := v.Args[0] 7259 if v_0.Op != OpRsh16Ux64 { 7260 break 7261 } 7262 x := v_0.Args[0] 7263 v_0_1 := v_0.Args[1] 7264 if v_0_1.Op != OpConst64 { 7265 break 7266 } 7267 c := v_0_1.AuxInt 7268 v_1 := v.Args[1] 7269 if v_1.Op != OpConst64 { 7270 break 7271 } 7272 d := v_1.AuxInt 7273 if !(!uaddOvf(c, d)) { 7274 break 7275 } 7276 v.reset(OpRsh16Ux64) 7277 v.AddArg(x) 7278 v0 := b.NewValue0(v.Line, OpConst64, t) 7279 v0.AuxInt = c + d 7280 v.AddArg(v0) 7281 return true 7282 } 7283 // match: (Rsh16Ux64 (Lsh16x64 (Rsh16Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 7284 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 7285 // result: (Rsh16Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 7286 for { 7287 v_0 := v.Args[0] 7288 if v_0.Op != OpLsh16x64 { 7289 break 7290 } 7291 v_0_0 := v_0.Args[0] 7292 if v_0_0.Op != OpRsh16Ux64 { 7293 break 7294 } 7295 x := v_0_0.Args[0] 7296 v_0_0_1 := v_0_0.Args[1] 7297 if v_0_0_1.Op != OpConst64 { 7298 break 7299 } 7300 c1 := v_0_0_1.AuxInt 7301 v_0_1 := v_0.Args[1] 7302 if v_0_1.Op != OpConst64 { 7303 break 7304 } 7305 c2 := v_0_1.AuxInt 7306 v_1 := v.Args[1] 7307 if v_1.Op != OpConst64 { 7308 break 7309 } 7310 c3 := v_1.AuxInt 7311 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 7312 break 7313 } 7314 v.reset(OpRsh16Ux64) 7315 v.AddArg(x) 7316 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 7317 v0.AuxInt = c1 - c2 + c3 7318 v.AddArg(v0) 7319 return true 7320 } 7321 return false 7322 } 7323 func 
rewriteValuegeneric_OpRsh16Ux8(v *Value, config *Config) bool { 7324 b := v.Block 7325 _ = b 7326 // match: (Rsh16Ux8 <t> x (Const8 [c])) 7327 // cond: 7328 // result: (Rsh16Ux64 x (Const64 <t> [int64(uint8(c))])) 7329 for { 7330 t := v.Type 7331 x := v.Args[0] 7332 v_1 := v.Args[1] 7333 if v_1.Op != OpConst8 { 7334 break 7335 } 7336 c := v_1.AuxInt 7337 v.reset(OpRsh16Ux64) 7338 v.AddArg(x) 7339 v0 := b.NewValue0(v.Line, OpConst64, t) 7340 v0.AuxInt = int64(uint8(c)) 7341 v.AddArg(v0) 7342 return true 7343 } 7344 // match: (Rsh16Ux8 (Const16 [0]) _) 7345 // cond: 7346 // result: (Const16 [0]) 7347 for { 7348 v_0 := v.Args[0] 7349 if v_0.Op != OpConst16 { 7350 break 7351 } 7352 if v_0.AuxInt != 0 { 7353 break 7354 } 7355 v.reset(OpConst16) 7356 v.AuxInt = 0 7357 return true 7358 } 7359 return false 7360 } 7361 func rewriteValuegeneric_OpRsh16x16(v *Value, config *Config) bool { 7362 b := v.Block 7363 _ = b 7364 // match: (Rsh16x16 <t> x (Const16 [c])) 7365 // cond: 7366 // result: (Rsh16x64 x (Const64 <t> [int64(uint16(c))])) 7367 for { 7368 t := v.Type 7369 x := v.Args[0] 7370 v_1 := v.Args[1] 7371 if v_1.Op != OpConst16 { 7372 break 7373 } 7374 c := v_1.AuxInt 7375 v.reset(OpRsh16x64) 7376 v.AddArg(x) 7377 v0 := b.NewValue0(v.Line, OpConst64, t) 7378 v0.AuxInt = int64(uint16(c)) 7379 v.AddArg(v0) 7380 return true 7381 } 7382 // match: (Rsh16x16 (Const16 [0]) _) 7383 // cond: 7384 // result: (Const16 [0]) 7385 for { 7386 v_0 := v.Args[0] 7387 if v_0.Op != OpConst16 { 7388 break 7389 } 7390 if v_0.AuxInt != 0 { 7391 break 7392 } 7393 v.reset(OpConst16) 7394 v.AuxInt = 0 7395 return true 7396 } 7397 return false 7398 } 7399 func rewriteValuegeneric_OpRsh16x32(v *Value, config *Config) bool { 7400 b := v.Block 7401 _ = b 7402 // match: (Rsh16x32 <t> x (Const32 [c])) 7403 // cond: 7404 // result: (Rsh16x64 x (Const64 <t> [int64(uint32(c))])) 7405 for { 7406 t := v.Type 7407 x := v.Args[0] 7408 v_1 := v.Args[1] 7409 if v_1.Op != OpConst32 { 7410 break 7411 } 7412 c := 
v_1.AuxInt 7413 v.reset(OpRsh16x64) 7414 v.AddArg(x) 7415 v0 := b.NewValue0(v.Line, OpConst64, t) 7416 v0.AuxInt = int64(uint32(c)) 7417 v.AddArg(v0) 7418 return true 7419 } 7420 // match: (Rsh16x32 (Const16 [0]) _) 7421 // cond: 7422 // result: (Const16 [0]) 7423 for { 7424 v_0 := v.Args[0] 7425 if v_0.Op != OpConst16 { 7426 break 7427 } 7428 if v_0.AuxInt != 0 { 7429 break 7430 } 7431 v.reset(OpConst16) 7432 v.AuxInt = 0 7433 return true 7434 } 7435 return false 7436 } 7437 func rewriteValuegeneric_OpRsh16x64(v *Value, config *Config) bool { 7438 b := v.Block 7439 _ = b 7440 // match: (Rsh16x64 (Const16 [c]) (Const64 [d])) 7441 // cond: 7442 // result: (Const16 [int64(int16(c) >> uint64(d))]) 7443 for { 7444 v_0 := v.Args[0] 7445 if v_0.Op != OpConst16 { 7446 break 7447 } 7448 c := v_0.AuxInt 7449 v_1 := v.Args[1] 7450 if v_1.Op != OpConst64 { 7451 break 7452 } 7453 d := v_1.AuxInt 7454 v.reset(OpConst16) 7455 v.AuxInt = int64(int16(c) >> uint64(d)) 7456 return true 7457 } 7458 // match: (Rsh16x64 x (Const64 [0])) 7459 // cond: 7460 // result: x 7461 for { 7462 x := v.Args[0] 7463 v_1 := v.Args[1] 7464 if v_1.Op != OpConst64 { 7465 break 7466 } 7467 if v_1.AuxInt != 0 { 7468 break 7469 } 7470 v.reset(OpCopy) 7471 v.Type = x.Type 7472 v.AddArg(x) 7473 return true 7474 } 7475 // match: (Rsh16x64 (Const16 [0]) _) 7476 // cond: 7477 // result: (Const16 [0]) 7478 for { 7479 v_0 := v.Args[0] 7480 if v_0.Op != OpConst16 { 7481 break 7482 } 7483 if v_0.AuxInt != 0 { 7484 break 7485 } 7486 v.reset(OpConst16) 7487 v.AuxInt = 0 7488 return true 7489 } 7490 // match: (Rsh16x64 <t> (Rsh16x64 x (Const64 [c])) (Const64 [d])) 7491 // cond: !uaddOvf(c,d) 7492 // result: (Rsh16x64 x (Const64 <t> [c+d])) 7493 for { 7494 t := v.Type 7495 v_0 := v.Args[0] 7496 if v_0.Op != OpRsh16x64 { 7497 break 7498 } 7499 x := v_0.Args[0] 7500 v_0_1 := v_0.Args[1] 7501 if v_0_1.Op != OpConst64 { 7502 break 7503 } 7504 c := v_0_1.AuxInt 7505 v_1 := v.Args[1] 7506 if v_1.Op != OpConst64 { 7507 break 
7508 } 7509 d := v_1.AuxInt 7510 if !(!uaddOvf(c, d)) { 7511 break 7512 } 7513 v.reset(OpRsh16x64) 7514 v.AddArg(x) 7515 v0 := b.NewValue0(v.Line, OpConst64, t) 7516 v0.AuxInt = c + d 7517 v.AddArg(v0) 7518 return true 7519 } 7520 return false 7521 } 7522 func rewriteValuegeneric_OpRsh16x8(v *Value, config *Config) bool { 7523 b := v.Block 7524 _ = b 7525 // match: (Rsh16x8 <t> x (Const8 [c])) 7526 // cond: 7527 // result: (Rsh16x64 x (Const64 <t> [int64(uint8(c))])) 7528 for { 7529 t := v.Type 7530 x := v.Args[0] 7531 v_1 := v.Args[1] 7532 if v_1.Op != OpConst8 { 7533 break 7534 } 7535 c := v_1.AuxInt 7536 v.reset(OpRsh16x64) 7537 v.AddArg(x) 7538 v0 := b.NewValue0(v.Line, OpConst64, t) 7539 v0.AuxInt = int64(uint8(c)) 7540 v.AddArg(v0) 7541 return true 7542 } 7543 // match: (Rsh16x8 (Const16 [0]) _) 7544 // cond: 7545 // result: (Const16 [0]) 7546 for { 7547 v_0 := v.Args[0] 7548 if v_0.Op != OpConst16 { 7549 break 7550 } 7551 if v_0.AuxInt != 0 { 7552 break 7553 } 7554 v.reset(OpConst16) 7555 v.AuxInt = 0 7556 return true 7557 } 7558 return false 7559 } 7560 func rewriteValuegeneric_OpRsh32Ux16(v *Value, config *Config) bool { 7561 b := v.Block 7562 _ = b 7563 // match: (Rsh32Ux16 <t> x (Const16 [c])) 7564 // cond: 7565 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint16(c))])) 7566 for { 7567 t := v.Type 7568 x := v.Args[0] 7569 v_1 := v.Args[1] 7570 if v_1.Op != OpConst16 { 7571 break 7572 } 7573 c := v_1.AuxInt 7574 v.reset(OpRsh32Ux64) 7575 v.AddArg(x) 7576 v0 := b.NewValue0(v.Line, OpConst64, t) 7577 v0.AuxInt = int64(uint16(c)) 7578 v.AddArg(v0) 7579 return true 7580 } 7581 // match: (Rsh32Ux16 (Const32 [0]) _) 7582 // cond: 7583 // result: (Const32 [0]) 7584 for { 7585 v_0 := v.Args[0] 7586 if v_0.Op != OpConst32 { 7587 break 7588 } 7589 if v_0.AuxInt != 0 { 7590 break 7591 } 7592 v.reset(OpConst32) 7593 v.AuxInt = 0 7594 return true 7595 } 7596 return false 7597 } 7598 func rewriteValuegeneric_OpRsh32Ux32(v *Value, config *Config) bool { 7599 b := v.Block 
7600 _ = b 7601 // match: (Rsh32Ux32 <t> x (Const32 [c])) 7602 // cond: 7603 // result: (Rsh32Ux64 x (Const64 <t> [int64(uint32(c))])) 7604 for { 7605 t := v.Type 7606 x := v.Args[0] 7607 v_1 := v.Args[1] 7608 if v_1.Op != OpConst32 { 7609 break 7610 } 7611 c := v_1.AuxInt 7612 v.reset(OpRsh32Ux64) 7613 v.AddArg(x) 7614 v0 := b.NewValue0(v.Line, OpConst64, t) 7615 v0.AuxInt = int64(uint32(c)) 7616 v.AddArg(v0) 7617 return true 7618 } 7619 // match: (Rsh32Ux32 (Const32 [0]) _) 7620 // cond: 7621 // result: (Const32 [0]) 7622 for { 7623 v_0 := v.Args[0] 7624 if v_0.Op != OpConst32 { 7625 break 7626 } 7627 if v_0.AuxInt != 0 { 7628 break 7629 } 7630 v.reset(OpConst32) 7631 v.AuxInt = 0 7632 return true 7633 } 7634 return false 7635 } 7636 func rewriteValuegeneric_OpRsh32Ux64(v *Value, config *Config) bool { 7637 b := v.Block 7638 _ = b 7639 // match: (Rsh32Ux64 (Const32 [c]) (Const64 [d])) 7640 // cond: 7641 // result: (Const32 [int64(int32(uint32(c) >> uint64(d)))]) 7642 for { 7643 v_0 := v.Args[0] 7644 if v_0.Op != OpConst32 { 7645 break 7646 } 7647 c := v_0.AuxInt 7648 v_1 := v.Args[1] 7649 if v_1.Op != OpConst64 { 7650 break 7651 } 7652 d := v_1.AuxInt 7653 v.reset(OpConst32) 7654 v.AuxInt = int64(int32(uint32(c) >> uint64(d))) 7655 return true 7656 } 7657 // match: (Rsh32Ux64 x (Const64 [0])) 7658 // cond: 7659 // result: x 7660 for { 7661 x := v.Args[0] 7662 v_1 := v.Args[1] 7663 if v_1.Op != OpConst64 { 7664 break 7665 } 7666 if v_1.AuxInt != 0 { 7667 break 7668 } 7669 v.reset(OpCopy) 7670 v.Type = x.Type 7671 v.AddArg(x) 7672 return true 7673 } 7674 // match: (Rsh32Ux64 (Const32 [0]) _) 7675 // cond: 7676 // result: (Const32 [0]) 7677 for { 7678 v_0 := v.Args[0] 7679 if v_0.Op != OpConst32 { 7680 break 7681 } 7682 if v_0.AuxInt != 0 { 7683 break 7684 } 7685 v.reset(OpConst32) 7686 v.AuxInt = 0 7687 return true 7688 } 7689 // match: (Rsh32Ux64 _ (Const64 [c])) 7690 // cond: uint64(c) >= 32 7691 // result: (Const32 [0]) 7692 for { 7693 v_1 := v.Args[1] 7694 if 
v_1.Op != OpConst64 { 7695 break 7696 } 7697 c := v_1.AuxInt 7698 if !(uint64(c) >= 32) { 7699 break 7700 } 7701 v.reset(OpConst32) 7702 v.AuxInt = 0 7703 return true 7704 } 7705 // match: (Rsh32Ux64 <t> (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) 7706 // cond: !uaddOvf(c,d) 7707 // result: (Rsh32Ux64 x (Const64 <t> [c+d])) 7708 for { 7709 t := v.Type 7710 v_0 := v.Args[0] 7711 if v_0.Op != OpRsh32Ux64 { 7712 break 7713 } 7714 x := v_0.Args[0] 7715 v_0_1 := v_0.Args[1] 7716 if v_0_1.Op != OpConst64 { 7717 break 7718 } 7719 c := v_0_1.AuxInt 7720 v_1 := v.Args[1] 7721 if v_1.Op != OpConst64 { 7722 break 7723 } 7724 d := v_1.AuxInt 7725 if !(!uaddOvf(c, d)) { 7726 break 7727 } 7728 v.reset(OpRsh32Ux64) 7729 v.AddArg(x) 7730 v0 := b.NewValue0(v.Line, OpConst64, t) 7731 v0.AuxInt = c + d 7732 v.AddArg(v0) 7733 return true 7734 } 7735 // match: (Rsh32Ux64 (Lsh32x64 (Rsh32Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 7736 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 7737 // result: (Rsh32Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 7738 for { 7739 v_0 := v.Args[0] 7740 if v_0.Op != OpLsh32x64 { 7741 break 7742 } 7743 v_0_0 := v_0.Args[0] 7744 if v_0_0.Op != OpRsh32Ux64 { 7745 break 7746 } 7747 x := v_0_0.Args[0] 7748 v_0_0_1 := v_0_0.Args[1] 7749 if v_0_0_1.Op != OpConst64 { 7750 break 7751 } 7752 c1 := v_0_0_1.AuxInt 7753 v_0_1 := v_0.Args[1] 7754 if v_0_1.Op != OpConst64 { 7755 break 7756 } 7757 c2 := v_0_1.AuxInt 7758 v_1 := v.Args[1] 7759 if v_1.Op != OpConst64 { 7760 break 7761 } 7762 c3 := v_1.AuxInt 7763 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 7764 break 7765 } 7766 v.reset(OpRsh32Ux64) 7767 v.AddArg(x) 7768 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 7769 v0.AuxInt = c1 - c2 + c3 7770 v.AddArg(v0) 7771 return true 7772 } 7773 return false 7774 } 7775 func rewriteValuegeneric_OpRsh32Ux8(v *Value, config *Config) bool { 7776 b := v.Block 7777 _ = b 
	// Tail of rewriteValuegeneric_OpRsh32Ux8 (the function opens in the
	// previous chunk): remaining rules normalize the 8-bit shift count to a
	// 64-bit constant, or fold a shift whose shifted operand is constant 0.
	// match: (Rsh32Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh32Ux64 x (Const64 <t> [int64(uint8(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		// Zero-extend the 8-bit count to 64 bits so later rules only need
		// to handle the ...x64 form of the shift.
		v.reset(OpRsh32Ux64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint8(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32Ux8 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		// 0 >> anything is 0, even when the count is not a constant.
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x16 applies the generic rewrite rules for
// Rsh32x16 (signed 32-bit right shift with a 16-bit count). Each rule is
// tried in order; on a match v is rewritten in place and true is returned.
func rewriteValuegeneric_OpRsh32x16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x16 <t> x (Const16 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint16(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		// Zero-extend the 16-bit constant count to 64 bits.
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = int64(uint16(c))
		v.AddArg(v0)
		return true
	}
	// match: (Rsh32x16 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}

// rewriteValuegeneric_OpRsh32x32 applies the generic rewrite rules for
// Rsh32x32 (signed 32-bit right shift with a 32-bit count); the second
// rule of this function continues in the next chunk.
func rewriteValuegeneric_OpRsh32x32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh32x32 <t> x (Const32 [c]))
	// cond:
	// result: (Rsh32x64 x (Const64 <t> [int64(uint32(c))]))
	for {
		t := v.Type
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpRsh32x64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Line,
OpConst64, t) 7868 v0.AuxInt = int64(uint32(c)) 7869 v.AddArg(v0) 7870 return true 7871 } 7872 // match: (Rsh32x32 (Const32 [0]) _) 7873 // cond: 7874 // result: (Const32 [0]) 7875 for { 7876 v_0 := v.Args[0] 7877 if v_0.Op != OpConst32 { 7878 break 7879 } 7880 if v_0.AuxInt != 0 { 7881 break 7882 } 7883 v.reset(OpConst32) 7884 v.AuxInt = 0 7885 return true 7886 } 7887 return false 7888 } 7889 func rewriteValuegeneric_OpRsh32x64(v *Value, config *Config) bool { 7890 b := v.Block 7891 _ = b 7892 // match: (Rsh32x64 (Const32 [c]) (Const64 [d])) 7893 // cond: 7894 // result: (Const32 [int64(int32(c) >> uint64(d))]) 7895 for { 7896 v_0 := v.Args[0] 7897 if v_0.Op != OpConst32 { 7898 break 7899 } 7900 c := v_0.AuxInt 7901 v_1 := v.Args[1] 7902 if v_1.Op != OpConst64 { 7903 break 7904 } 7905 d := v_1.AuxInt 7906 v.reset(OpConst32) 7907 v.AuxInt = int64(int32(c) >> uint64(d)) 7908 return true 7909 } 7910 // match: (Rsh32x64 x (Const64 [0])) 7911 // cond: 7912 // result: x 7913 for { 7914 x := v.Args[0] 7915 v_1 := v.Args[1] 7916 if v_1.Op != OpConst64 { 7917 break 7918 } 7919 if v_1.AuxInt != 0 { 7920 break 7921 } 7922 v.reset(OpCopy) 7923 v.Type = x.Type 7924 v.AddArg(x) 7925 return true 7926 } 7927 // match: (Rsh32x64 (Const32 [0]) _) 7928 // cond: 7929 // result: (Const32 [0]) 7930 for { 7931 v_0 := v.Args[0] 7932 if v_0.Op != OpConst32 { 7933 break 7934 } 7935 if v_0.AuxInt != 0 { 7936 break 7937 } 7938 v.reset(OpConst32) 7939 v.AuxInt = 0 7940 return true 7941 } 7942 // match: (Rsh32x64 <t> (Rsh32x64 x (Const64 [c])) (Const64 [d])) 7943 // cond: !uaddOvf(c,d) 7944 // result: (Rsh32x64 x (Const64 <t> [c+d])) 7945 for { 7946 t := v.Type 7947 v_0 := v.Args[0] 7948 if v_0.Op != OpRsh32x64 { 7949 break 7950 } 7951 x := v_0.Args[0] 7952 v_0_1 := v_0.Args[1] 7953 if v_0_1.Op != OpConst64 { 7954 break 7955 } 7956 c := v_0_1.AuxInt 7957 v_1 := v.Args[1] 7958 if v_1.Op != OpConst64 { 7959 break 7960 } 7961 d := v_1.AuxInt 7962 if !(!uaddOvf(c, d)) { 7963 break 7964 } 7965 
v.reset(OpRsh32x64) 7966 v.AddArg(x) 7967 v0 := b.NewValue0(v.Line, OpConst64, t) 7968 v0.AuxInt = c + d 7969 v.AddArg(v0) 7970 return true 7971 } 7972 return false 7973 } 7974 func rewriteValuegeneric_OpRsh32x8(v *Value, config *Config) bool { 7975 b := v.Block 7976 _ = b 7977 // match: (Rsh32x8 <t> x (Const8 [c])) 7978 // cond: 7979 // result: (Rsh32x64 x (Const64 <t> [int64(uint8(c))])) 7980 for { 7981 t := v.Type 7982 x := v.Args[0] 7983 v_1 := v.Args[1] 7984 if v_1.Op != OpConst8 { 7985 break 7986 } 7987 c := v_1.AuxInt 7988 v.reset(OpRsh32x64) 7989 v.AddArg(x) 7990 v0 := b.NewValue0(v.Line, OpConst64, t) 7991 v0.AuxInt = int64(uint8(c)) 7992 v.AddArg(v0) 7993 return true 7994 } 7995 // match: (Rsh32x8 (Const32 [0]) _) 7996 // cond: 7997 // result: (Const32 [0]) 7998 for { 7999 v_0 := v.Args[0] 8000 if v_0.Op != OpConst32 { 8001 break 8002 } 8003 if v_0.AuxInt != 0 { 8004 break 8005 } 8006 v.reset(OpConst32) 8007 v.AuxInt = 0 8008 return true 8009 } 8010 return false 8011 } 8012 func rewriteValuegeneric_OpRsh64Ux16(v *Value, config *Config) bool { 8013 b := v.Block 8014 _ = b 8015 // match: (Rsh64Ux16 <t> x (Const16 [c])) 8016 // cond: 8017 // result: (Rsh64Ux64 x (Const64 <t> [int64(uint16(c))])) 8018 for { 8019 t := v.Type 8020 x := v.Args[0] 8021 v_1 := v.Args[1] 8022 if v_1.Op != OpConst16 { 8023 break 8024 } 8025 c := v_1.AuxInt 8026 v.reset(OpRsh64Ux64) 8027 v.AddArg(x) 8028 v0 := b.NewValue0(v.Line, OpConst64, t) 8029 v0.AuxInt = int64(uint16(c)) 8030 v.AddArg(v0) 8031 return true 8032 } 8033 // match: (Rsh64Ux16 (Const64 [0]) _) 8034 // cond: 8035 // result: (Const64 [0]) 8036 for { 8037 v_0 := v.Args[0] 8038 if v_0.Op != OpConst64 { 8039 break 8040 } 8041 if v_0.AuxInt != 0 { 8042 break 8043 } 8044 v.reset(OpConst64) 8045 v.AuxInt = 0 8046 return true 8047 } 8048 return false 8049 } 8050 func rewriteValuegeneric_OpRsh64Ux32(v *Value, config *Config) bool { 8051 b := v.Block 8052 _ = b 8053 // match: (Rsh64Ux32 <t> x (Const32 [c])) 8054 // cond: 8055 
// result: (Rsh64Ux64 x (Const64 <t> [int64(uint32(c))])) 8056 for { 8057 t := v.Type 8058 x := v.Args[0] 8059 v_1 := v.Args[1] 8060 if v_1.Op != OpConst32 { 8061 break 8062 } 8063 c := v_1.AuxInt 8064 v.reset(OpRsh64Ux64) 8065 v.AddArg(x) 8066 v0 := b.NewValue0(v.Line, OpConst64, t) 8067 v0.AuxInt = int64(uint32(c)) 8068 v.AddArg(v0) 8069 return true 8070 } 8071 // match: (Rsh64Ux32 (Const64 [0]) _) 8072 // cond: 8073 // result: (Const64 [0]) 8074 for { 8075 v_0 := v.Args[0] 8076 if v_0.Op != OpConst64 { 8077 break 8078 } 8079 if v_0.AuxInt != 0 { 8080 break 8081 } 8082 v.reset(OpConst64) 8083 v.AuxInt = 0 8084 return true 8085 } 8086 return false 8087 } 8088 func rewriteValuegeneric_OpRsh64Ux64(v *Value, config *Config) bool { 8089 b := v.Block 8090 _ = b 8091 // match: (Rsh64Ux64 (Const64 [c]) (Const64 [d])) 8092 // cond: 8093 // result: (Const64 [int64(uint64(c) >> uint64(d))]) 8094 for { 8095 v_0 := v.Args[0] 8096 if v_0.Op != OpConst64 { 8097 break 8098 } 8099 c := v_0.AuxInt 8100 v_1 := v.Args[1] 8101 if v_1.Op != OpConst64 { 8102 break 8103 } 8104 d := v_1.AuxInt 8105 v.reset(OpConst64) 8106 v.AuxInt = int64(uint64(c) >> uint64(d)) 8107 return true 8108 } 8109 // match: (Rsh64Ux64 x (Const64 [0])) 8110 // cond: 8111 // result: x 8112 for { 8113 x := v.Args[0] 8114 v_1 := v.Args[1] 8115 if v_1.Op != OpConst64 { 8116 break 8117 } 8118 if v_1.AuxInt != 0 { 8119 break 8120 } 8121 v.reset(OpCopy) 8122 v.Type = x.Type 8123 v.AddArg(x) 8124 return true 8125 } 8126 // match: (Rsh64Ux64 (Const64 [0]) _) 8127 // cond: 8128 // result: (Const64 [0]) 8129 for { 8130 v_0 := v.Args[0] 8131 if v_0.Op != OpConst64 { 8132 break 8133 } 8134 if v_0.AuxInt != 0 { 8135 break 8136 } 8137 v.reset(OpConst64) 8138 v.AuxInt = 0 8139 return true 8140 } 8141 // match: (Rsh64Ux64 _ (Const64 [c])) 8142 // cond: uint64(c) >= 64 8143 // result: (Const64 [0]) 8144 for { 8145 v_1 := v.Args[1] 8146 if v_1.Op != OpConst64 { 8147 break 8148 } 8149 c := v_1.AuxInt 8150 if !(uint64(c) >= 64) { 
		break
	}
	// Count >= 64 (guard established just above, in the previous chunk):
	// an unsigned 64-bit value shifted right by 64 or more is always zero.
	v.reset(OpConst64)
	v.AuxInt = 0
	return true
}
// Combine two stacked right shifts into one: (x >>u c) >>u d == x >>u (c+d),
// valid only while c+d does not wrap around as an unsigned sum.
// match: (Rsh64Ux64 <t> (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
// cond: !uaddOvf(c,d)
// result: (Rsh64Ux64 x (Const64 <t> [c+d]))
for {
	t := v.Type
	v_0 := v.Args[0]
	if v_0.Op != OpRsh64Ux64 {
		break
	}
	x := v_0.Args[0]
	v_0_1 := v_0.Args[1]
	if v_0_1.Op != OpConst64 {
		break
	}
	c := v_0_1.AuxInt
	v_1 := v.Args[1]
	if v_1.Op != OpConst64 {
		break
	}
	d := v_1.AuxInt
	// uaddOvf reports unsigned overflow of c+d; the combined count must
	// be representable for the rewrite to be sound.
	if !(!uaddOvf(c, d)) {
		break
	}
	v.reset(OpRsh64Ux64)
	v.AddArg(x)
	v0 := b.NewValue0(v.Line, OpConst64, t)
	v0.AuxInt = c + d
	v.AddArg(v0)
	return true
}
// Collapse a shift-right / shift-left / shift-right sandwich:
// ((x >>u c1) << c2) >>u c3 == x >>u (c1-c2+c3), provided the left shift
// undoes no more than the first right shift (c1 >= c2) and the final
// right shift re-clears at least what the left shift exposed (c3 >= c2).
// match: (Rsh64Ux64 (Lsh64x64 (Rsh64Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
// cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
// result: (Rsh64Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3]))
for {
	v_0 := v.Args[0]
	if v_0.Op != OpLsh64x64 {
		break
	}
	v_0_0 := v_0.Args[0]
	if v_0_0.Op != OpRsh64Ux64 {
		break
	}
	x := v_0_0.Args[0]
	v_0_0_1 := v_0_0.Args[1]
	if v_0_0_1.Op != OpConst64 {
		break
	}
	c1 := v_0_0_1.AuxInt
	v_0_1 := v_0.Args[1]
	if v_0_1.Op != OpConst64 {
		break
	}
	c2 := v_0_1.AuxInt
	v_1 := v.Args[1]
	if v_1.Op != OpConst64 {
		break
	}
	c3 := v_1.AuxInt
	if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
		break
	}
	v.reset(OpRsh64Ux64)
	v.AddArg(x)
	// The combined count gets a fresh UInt64 type from the frontend
	// rather than reusing one of the operand types.
	v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64())
	v0.AuxInt = c1 - c2 + c3
	v.AddArg(v0)
	return true
}
return false
}

// rewriteValuegeneric_OpRsh64Ux8 applies the generic rewrite rules for
// Rsh64Ux8 (unsigned 64-bit right shift with an 8-bit count); its first
// rule continues in the next chunk.
func rewriteValuegeneric_OpRsh64Ux8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Rsh64Ux8 <t> x (Const8 [c]))
	// cond:
	// result: (Rsh64Ux64 x
(Const64 <t> [int64(uint8(c))])) 8233 for { 8234 t := v.Type 8235 x := v.Args[0] 8236 v_1 := v.Args[1] 8237 if v_1.Op != OpConst8 { 8238 break 8239 } 8240 c := v_1.AuxInt 8241 v.reset(OpRsh64Ux64) 8242 v.AddArg(x) 8243 v0 := b.NewValue0(v.Line, OpConst64, t) 8244 v0.AuxInt = int64(uint8(c)) 8245 v.AddArg(v0) 8246 return true 8247 } 8248 // match: (Rsh64Ux8 (Const64 [0]) _) 8249 // cond: 8250 // result: (Const64 [0]) 8251 for { 8252 v_0 := v.Args[0] 8253 if v_0.Op != OpConst64 { 8254 break 8255 } 8256 if v_0.AuxInt != 0 { 8257 break 8258 } 8259 v.reset(OpConst64) 8260 v.AuxInt = 0 8261 return true 8262 } 8263 return false 8264 } 8265 func rewriteValuegeneric_OpRsh64x16(v *Value, config *Config) bool { 8266 b := v.Block 8267 _ = b 8268 // match: (Rsh64x16 <t> x (Const16 [c])) 8269 // cond: 8270 // result: (Rsh64x64 x (Const64 <t> [int64(uint16(c))])) 8271 for { 8272 t := v.Type 8273 x := v.Args[0] 8274 v_1 := v.Args[1] 8275 if v_1.Op != OpConst16 { 8276 break 8277 } 8278 c := v_1.AuxInt 8279 v.reset(OpRsh64x64) 8280 v.AddArg(x) 8281 v0 := b.NewValue0(v.Line, OpConst64, t) 8282 v0.AuxInt = int64(uint16(c)) 8283 v.AddArg(v0) 8284 return true 8285 } 8286 // match: (Rsh64x16 (Const64 [0]) _) 8287 // cond: 8288 // result: (Const64 [0]) 8289 for { 8290 v_0 := v.Args[0] 8291 if v_0.Op != OpConst64 { 8292 break 8293 } 8294 if v_0.AuxInt != 0 { 8295 break 8296 } 8297 v.reset(OpConst64) 8298 v.AuxInt = 0 8299 return true 8300 } 8301 return false 8302 } 8303 func rewriteValuegeneric_OpRsh64x32(v *Value, config *Config) bool { 8304 b := v.Block 8305 _ = b 8306 // match: (Rsh64x32 <t> x (Const32 [c])) 8307 // cond: 8308 // result: (Rsh64x64 x (Const64 <t> [int64(uint32(c))])) 8309 for { 8310 t := v.Type 8311 x := v.Args[0] 8312 v_1 := v.Args[1] 8313 if v_1.Op != OpConst32 { 8314 break 8315 } 8316 c := v_1.AuxInt 8317 v.reset(OpRsh64x64) 8318 v.AddArg(x) 8319 v0 := b.NewValue0(v.Line, OpConst64, t) 8320 v0.AuxInt = int64(uint32(c)) 8321 v.AddArg(v0) 8322 return true 8323 } 8324 // 
match: (Rsh64x32 (Const64 [0]) _) 8325 // cond: 8326 // result: (Const64 [0]) 8327 for { 8328 v_0 := v.Args[0] 8329 if v_0.Op != OpConst64 { 8330 break 8331 } 8332 if v_0.AuxInt != 0 { 8333 break 8334 } 8335 v.reset(OpConst64) 8336 v.AuxInt = 0 8337 return true 8338 } 8339 return false 8340 } 8341 func rewriteValuegeneric_OpRsh64x64(v *Value, config *Config) bool { 8342 b := v.Block 8343 _ = b 8344 // match: (Rsh64x64 (Const64 [c]) (Const64 [d])) 8345 // cond: 8346 // result: (Const64 [c >> uint64(d)]) 8347 for { 8348 v_0 := v.Args[0] 8349 if v_0.Op != OpConst64 { 8350 break 8351 } 8352 c := v_0.AuxInt 8353 v_1 := v.Args[1] 8354 if v_1.Op != OpConst64 { 8355 break 8356 } 8357 d := v_1.AuxInt 8358 v.reset(OpConst64) 8359 v.AuxInt = c >> uint64(d) 8360 return true 8361 } 8362 // match: (Rsh64x64 x (Const64 [0])) 8363 // cond: 8364 // result: x 8365 for { 8366 x := v.Args[0] 8367 v_1 := v.Args[1] 8368 if v_1.Op != OpConst64 { 8369 break 8370 } 8371 if v_1.AuxInt != 0 { 8372 break 8373 } 8374 v.reset(OpCopy) 8375 v.Type = x.Type 8376 v.AddArg(x) 8377 return true 8378 } 8379 // match: (Rsh64x64 (Const64 [0]) _) 8380 // cond: 8381 // result: (Const64 [0]) 8382 for { 8383 v_0 := v.Args[0] 8384 if v_0.Op != OpConst64 { 8385 break 8386 } 8387 if v_0.AuxInt != 0 { 8388 break 8389 } 8390 v.reset(OpConst64) 8391 v.AuxInt = 0 8392 return true 8393 } 8394 // match: (Rsh64x64 <t> (Rsh64x64 x (Const64 [c])) (Const64 [d])) 8395 // cond: !uaddOvf(c,d) 8396 // result: (Rsh64x64 x (Const64 <t> [c+d])) 8397 for { 8398 t := v.Type 8399 v_0 := v.Args[0] 8400 if v_0.Op != OpRsh64x64 { 8401 break 8402 } 8403 x := v_0.Args[0] 8404 v_0_1 := v_0.Args[1] 8405 if v_0_1.Op != OpConst64 { 8406 break 8407 } 8408 c := v_0_1.AuxInt 8409 v_1 := v.Args[1] 8410 if v_1.Op != OpConst64 { 8411 break 8412 } 8413 d := v_1.AuxInt 8414 if !(!uaddOvf(c, d)) { 8415 break 8416 } 8417 v.reset(OpRsh64x64) 8418 v.AddArg(x) 8419 v0 := b.NewValue0(v.Line, OpConst64, t) 8420 v0.AuxInt = c + d 8421 v.AddArg(v0) 8422 
return true 8423 } 8424 return false 8425 } 8426 func rewriteValuegeneric_OpRsh64x8(v *Value, config *Config) bool { 8427 b := v.Block 8428 _ = b 8429 // match: (Rsh64x8 <t> x (Const8 [c])) 8430 // cond: 8431 // result: (Rsh64x64 x (Const64 <t> [int64(uint8(c))])) 8432 for { 8433 t := v.Type 8434 x := v.Args[0] 8435 v_1 := v.Args[1] 8436 if v_1.Op != OpConst8 { 8437 break 8438 } 8439 c := v_1.AuxInt 8440 v.reset(OpRsh64x64) 8441 v.AddArg(x) 8442 v0 := b.NewValue0(v.Line, OpConst64, t) 8443 v0.AuxInt = int64(uint8(c)) 8444 v.AddArg(v0) 8445 return true 8446 } 8447 // match: (Rsh64x8 (Const64 [0]) _) 8448 // cond: 8449 // result: (Const64 [0]) 8450 for { 8451 v_0 := v.Args[0] 8452 if v_0.Op != OpConst64 { 8453 break 8454 } 8455 if v_0.AuxInt != 0 { 8456 break 8457 } 8458 v.reset(OpConst64) 8459 v.AuxInt = 0 8460 return true 8461 } 8462 return false 8463 } 8464 func rewriteValuegeneric_OpRsh8Ux16(v *Value, config *Config) bool { 8465 b := v.Block 8466 _ = b 8467 // match: (Rsh8Ux16 <t> x (Const16 [c])) 8468 // cond: 8469 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint16(c))])) 8470 for { 8471 t := v.Type 8472 x := v.Args[0] 8473 v_1 := v.Args[1] 8474 if v_1.Op != OpConst16 { 8475 break 8476 } 8477 c := v_1.AuxInt 8478 v.reset(OpRsh8Ux64) 8479 v.AddArg(x) 8480 v0 := b.NewValue0(v.Line, OpConst64, t) 8481 v0.AuxInt = int64(uint16(c)) 8482 v.AddArg(v0) 8483 return true 8484 } 8485 // match: (Rsh8Ux16 (Const8 [0]) _) 8486 // cond: 8487 // result: (Const8 [0]) 8488 for { 8489 v_0 := v.Args[0] 8490 if v_0.Op != OpConst8 { 8491 break 8492 } 8493 if v_0.AuxInt != 0 { 8494 break 8495 } 8496 v.reset(OpConst8) 8497 v.AuxInt = 0 8498 return true 8499 } 8500 return false 8501 } 8502 func rewriteValuegeneric_OpRsh8Ux32(v *Value, config *Config) bool { 8503 b := v.Block 8504 _ = b 8505 // match: (Rsh8Ux32 <t> x (Const32 [c])) 8506 // cond: 8507 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint32(c))])) 8508 for { 8509 t := v.Type 8510 x := v.Args[0] 8511 v_1 := v.Args[1] 8512 if 
v_1.Op != OpConst32 { 8513 break 8514 } 8515 c := v_1.AuxInt 8516 v.reset(OpRsh8Ux64) 8517 v.AddArg(x) 8518 v0 := b.NewValue0(v.Line, OpConst64, t) 8519 v0.AuxInt = int64(uint32(c)) 8520 v.AddArg(v0) 8521 return true 8522 } 8523 // match: (Rsh8Ux32 (Const8 [0]) _) 8524 // cond: 8525 // result: (Const8 [0]) 8526 for { 8527 v_0 := v.Args[0] 8528 if v_0.Op != OpConst8 { 8529 break 8530 } 8531 if v_0.AuxInt != 0 { 8532 break 8533 } 8534 v.reset(OpConst8) 8535 v.AuxInt = 0 8536 return true 8537 } 8538 return false 8539 } 8540 func rewriteValuegeneric_OpRsh8Ux64(v *Value, config *Config) bool { 8541 b := v.Block 8542 _ = b 8543 // match: (Rsh8Ux64 (Const8 [c]) (Const64 [d])) 8544 // cond: 8545 // result: (Const8 [int64(int8(uint8(c) >> uint64(d)))]) 8546 for { 8547 v_0 := v.Args[0] 8548 if v_0.Op != OpConst8 { 8549 break 8550 } 8551 c := v_0.AuxInt 8552 v_1 := v.Args[1] 8553 if v_1.Op != OpConst64 { 8554 break 8555 } 8556 d := v_1.AuxInt 8557 v.reset(OpConst8) 8558 v.AuxInt = int64(int8(uint8(c) >> uint64(d))) 8559 return true 8560 } 8561 // match: (Rsh8Ux64 x (Const64 [0])) 8562 // cond: 8563 // result: x 8564 for { 8565 x := v.Args[0] 8566 v_1 := v.Args[1] 8567 if v_1.Op != OpConst64 { 8568 break 8569 } 8570 if v_1.AuxInt != 0 { 8571 break 8572 } 8573 v.reset(OpCopy) 8574 v.Type = x.Type 8575 v.AddArg(x) 8576 return true 8577 } 8578 // match: (Rsh8Ux64 (Const8 [0]) _) 8579 // cond: 8580 // result: (Const8 [0]) 8581 for { 8582 v_0 := v.Args[0] 8583 if v_0.Op != OpConst8 { 8584 break 8585 } 8586 if v_0.AuxInt != 0 { 8587 break 8588 } 8589 v.reset(OpConst8) 8590 v.AuxInt = 0 8591 return true 8592 } 8593 // match: (Rsh8Ux64 _ (Const64 [c])) 8594 // cond: uint64(c) >= 8 8595 // result: (Const8 [0]) 8596 for { 8597 v_1 := v.Args[1] 8598 if v_1.Op != OpConst64 { 8599 break 8600 } 8601 c := v_1.AuxInt 8602 if !(uint64(c) >= 8) { 8603 break 8604 } 8605 v.reset(OpConst8) 8606 v.AuxInt = 0 8607 return true 8608 } 8609 // match: (Rsh8Ux64 <t> (Rsh8Ux64 x (Const64 [c])) (Const64 
[d])) 8610 // cond: !uaddOvf(c,d) 8611 // result: (Rsh8Ux64 x (Const64 <t> [c+d])) 8612 for { 8613 t := v.Type 8614 v_0 := v.Args[0] 8615 if v_0.Op != OpRsh8Ux64 { 8616 break 8617 } 8618 x := v_0.Args[0] 8619 v_0_1 := v_0.Args[1] 8620 if v_0_1.Op != OpConst64 { 8621 break 8622 } 8623 c := v_0_1.AuxInt 8624 v_1 := v.Args[1] 8625 if v_1.Op != OpConst64 { 8626 break 8627 } 8628 d := v_1.AuxInt 8629 if !(!uaddOvf(c, d)) { 8630 break 8631 } 8632 v.reset(OpRsh8Ux64) 8633 v.AddArg(x) 8634 v0 := b.NewValue0(v.Line, OpConst64, t) 8635 v0.AuxInt = c + d 8636 v.AddArg(v0) 8637 return true 8638 } 8639 // match: (Rsh8Ux64 (Lsh8x64 (Rsh8Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3])) 8640 // cond: uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3) 8641 // result: (Rsh8Ux64 x (Const64 <config.fe.TypeUInt64()> [c1-c2+c3])) 8642 for { 8643 v_0 := v.Args[0] 8644 if v_0.Op != OpLsh8x64 { 8645 break 8646 } 8647 v_0_0 := v_0.Args[0] 8648 if v_0_0.Op != OpRsh8Ux64 { 8649 break 8650 } 8651 x := v_0_0.Args[0] 8652 v_0_0_1 := v_0_0.Args[1] 8653 if v_0_0_1.Op != OpConst64 { 8654 break 8655 } 8656 c1 := v_0_0_1.AuxInt 8657 v_0_1 := v_0.Args[1] 8658 if v_0_1.Op != OpConst64 { 8659 break 8660 } 8661 c2 := v_0_1.AuxInt 8662 v_1 := v.Args[1] 8663 if v_1.Op != OpConst64 { 8664 break 8665 } 8666 c3 := v_1.AuxInt 8667 if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { 8668 break 8669 } 8670 v.reset(OpRsh8Ux64) 8671 v.AddArg(x) 8672 v0 := b.NewValue0(v.Line, OpConst64, config.fe.TypeUInt64()) 8673 v0.AuxInt = c1 - c2 + c3 8674 v.AddArg(v0) 8675 return true 8676 } 8677 return false 8678 } 8679 func rewriteValuegeneric_OpRsh8Ux8(v *Value, config *Config) bool { 8680 b := v.Block 8681 _ = b 8682 // match: (Rsh8Ux8 <t> x (Const8 [c])) 8683 // cond: 8684 // result: (Rsh8Ux64 x (Const64 <t> [int64(uint8(c))])) 8685 for { 8686 t := v.Type 8687 x := v.Args[0] 8688 v_1 := v.Args[1] 8689 if v_1.Op != OpConst8 { 8690 break 8691 } 8692 c := 
v_1.AuxInt 8693 v.reset(OpRsh8Ux64) 8694 v.AddArg(x) 8695 v0 := b.NewValue0(v.Line, OpConst64, t) 8696 v0.AuxInt = int64(uint8(c)) 8697 v.AddArg(v0) 8698 return true 8699 } 8700 // match: (Rsh8Ux8 (Const8 [0]) _) 8701 // cond: 8702 // result: (Const8 [0]) 8703 for { 8704 v_0 := v.Args[0] 8705 if v_0.Op != OpConst8 { 8706 break 8707 } 8708 if v_0.AuxInt != 0 { 8709 break 8710 } 8711 v.reset(OpConst8) 8712 v.AuxInt = 0 8713 return true 8714 } 8715 return false 8716 } 8717 func rewriteValuegeneric_OpRsh8x16(v *Value, config *Config) bool { 8718 b := v.Block 8719 _ = b 8720 // match: (Rsh8x16 <t> x (Const16 [c])) 8721 // cond: 8722 // result: (Rsh8x64 x (Const64 <t> [int64(uint16(c))])) 8723 for { 8724 t := v.Type 8725 x := v.Args[0] 8726 v_1 := v.Args[1] 8727 if v_1.Op != OpConst16 { 8728 break 8729 } 8730 c := v_1.AuxInt 8731 v.reset(OpRsh8x64) 8732 v.AddArg(x) 8733 v0 := b.NewValue0(v.Line, OpConst64, t) 8734 v0.AuxInt = int64(uint16(c)) 8735 v.AddArg(v0) 8736 return true 8737 } 8738 // match: (Rsh8x16 (Const8 [0]) _) 8739 // cond: 8740 // result: (Const8 [0]) 8741 for { 8742 v_0 := v.Args[0] 8743 if v_0.Op != OpConst8 { 8744 break 8745 } 8746 if v_0.AuxInt != 0 { 8747 break 8748 } 8749 v.reset(OpConst8) 8750 v.AuxInt = 0 8751 return true 8752 } 8753 return false 8754 } 8755 func rewriteValuegeneric_OpRsh8x32(v *Value, config *Config) bool { 8756 b := v.Block 8757 _ = b 8758 // match: (Rsh8x32 <t> x (Const32 [c])) 8759 // cond: 8760 // result: (Rsh8x64 x (Const64 <t> [int64(uint32(c))])) 8761 for { 8762 t := v.Type 8763 x := v.Args[0] 8764 v_1 := v.Args[1] 8765 if v_1.Op != OpConst32 { 8766 break 8767 } 8768 c := v_1.AuxInt 8769 v.reset(OpRsh8x64) 8770 v.AddArg(x) 8771 v0 := b.NewValue0(v.Line, OpConst64, t) 8772 v0.AuxInt = int64(uint32(c)) 8773 v.AddArg(v0) 8774 return true 8775 } 8776 // match: (Rsh8x32 (Const8 [0]) _) 8777 // cond: 8778 // result: (Const8 [0]) 8779 for { 8780 v_0 := v.Args[0] 8781 if v_0.Op != OpConst8 { 8782 break 8783 } 8784 if v_0.AuxInt != 0 
{ 8785 break 8786 } 8787 v.reset(OpConst8) 8788 v.AuxInt = 0 8789 return true 8790 } 8791 return false 8792 } 8793 func rewriteValuegeneric_OpRsh8x64(v *Value, config *Config) bool { 8794 b := v.Block 8795 _ = b 8796 // match: (Rsh8x64 (Const8 [c]) (Const64 [d])) 8797 // cond: 8798 // result: (Const8 [int64(int8(c) >> uint64(d))]) 8799 for { 8800 v_0 := v.Args[0] 8801 if v_0.Op != OpConst8 { 8802 break 8803 } 8804 c := v_0.AuxInt 8805 v_1 := v.Args[1] 8806 if v_1.Op != OpConst64 { 8807 break 8808 } 8809 d := v_1.AuxInt 8810 v.reset(OpConst8) 8811 v.AuxInt = int64(int8(c) >> uint64(d)) 8812 return true 8813 } 8814 // match: (Rsh8x64 x (Const64 [0])) 8815 // cond: 8816 // result: x 8817 for { 8818 x := v.Args[0] 8819 v_1 := v.Args[1] 8820 if v_1.Op != OpConst64 { 8821 break 8822 } 8823 if v_1.AuxInt != 0 { 8824 break 8825 } 8826 v.reset(OpCopy) 8827 v.Type = x.Type 8828 v.AddArg(x) 8829 return true 8830 } 8831 // match: (Rsh8x64 (Const8 [0]) _) 8832 // cond: 8833 // result: (Const8 [0]) 8834 for { 8835 v_0 := v.Args[0] 8836 if v_0.Op != OpConst8 { 8837 break 8838 } 8839 if v_0.AuxInt != 0 { 8840 break 8841 } 8842 v.reset(OpConst8) 8843 v.AuxInt = 0 8844 return true 8845 } 8846 // match: (Rsh8x64 <t> (Rsh8x64 x (Const64 [c])) (Const64 [d])) 8847 // cond: !uaddOvf(c,d) 8848 // result: (Rsh8x64 x (Const64 <t> [c+d])) 8849 for { 8850 t := v.Type 8851 v_0 := v.Args[0] 8852 if v_0.Op != OpRsh8x64 { 8853 break 8854 } 8855 x := v_0.Args[0] 8856 v_0_1 := v_0.Args[1] 8857 if v_0_1.Op != OpConst64 { 8858 break 8859 } 8860 c := v_0_1.AuxInt 8861 v_1 := v.Args[1] 8862 if v_1.Op != OpConst64 { 8863 break 8864 } 8865 d := v_1.AuxInt 8866 if !(!uaddOvf(c, d)) { 8867 break 8868 } 8869 v.reset(OpRsh8x64) 8870 v.AddArg(x) 8871 v0 := b.NewValue0(v.Line, OpConst64, t) 8872 v0.AuxInt = c + d 8873 v.AddArg(v0) 8874 return true 8875 } 8876 return false 8877 } 8878 func rewriteValuegeneric_OpRsh8x8(v *Value, config *Config) bool { 8879 b := v.Block 8880 _ = b 8881 // match: (Rsh8x8 <t> x 
(Const8 [c])) 8882 // cond: 8883 // result: (Rsh8x64 x (Const64 <t> [int64(uint8(c))])) 8884 for { 8885 t := v.Type 8886 x := v.Args[0] 8887 v_1 := v.Args[1] 8888 if v_1.Op != OpConst8 { 8889 break 8890 } 8891 c := v_1.AuxInt 8892 v.reset(OpRsh8x64) 8893 v.AddArg(x) 8894 v0 := b.NewValue0(v.Line, OpConst64, t) 8895 v0.AuxInt = int64(uint8(c)) 8896 v.AddArg(v0) 8897 return true 8898 } 8899 // match: (Rsh8x8 (Const8 [0]) _) 8900 // cond: 8901 // result: (Const8 [0]) 8902 for { 8903 v_0 := v.Args[0] 8904 if v_0.Op != OpConst8 { 8905 break 8906 } 8907 if v_0.AuxInt != 0 { 8908 break 8909 } 8910 v.reset(OpConst8) 8911 v.AuxInt = 0 8912 return true 8913 } 8914 return false 8915 } 8916 func rewriteValuegeneric_OpSliceCap(v *Value, config *Config) bool { 8917 b := v.Block 8918 _ = b 8919 // match: (SliceCap (SliceMake _ _ (Const64 <t> [c]))) 8920 // cond: 8921 // result: (Const64 <t> [c]) 8922 for { 8923 v_0 := v.Args[0] 8924 if v_0.Op != OpSliceMake { 8925 break 8926 } 8927 v_0_2 := v_0.Args[2] 8928 if v_0_2.Op != OpConst64 { 8929 break 8930 } 8931 t := v_0_2.Type 8932 c := v_0_2.AuxInt 8933 v.reset(OpConst64) 8934 v.Type = t 8935 v.AuxInt = c 8936 return true 8937 } 8938 // match: (SliceCap (SliceMake _ _ (SliceCap x))) 8939 // cond: 8940 // result: (SliceCap x) 8941 for { 8942 v_0 := v.Args[0] 8943 if v_0.Op != OpSliceMake { 8944 break 8945 } 8946 v_0_2 := v_0.Args[2] 8947 if v_0_2.Op != OpSliceCap { 8948 break 8949 } 8950 x := v_0_2.Args[0] 8951 v.reset(OpSliceCap) 8952 v.AddArg(x) 8953 return true 8954 } 8955 // match: (SliceCap (SliceMake _ _ (SliceLen x))) 8956 // cond: 8957 // result: (SliceLen x) 8958 for { 8959 v_0 := v.Args[0] 8960 if v_0.Op != OpSliceMake { 8961 break 8962 } 8963 v_0_2 := v_0.Args[2] 8964 if v_0_2.Op != OpSliceLen { 8965 break 8966 } 8967 x := v_0_2.Args[0] 8968 v.reset(OpSliceLen) 8969 v.AddArg(x) 8970 return true 8971 } 8972 return false 8973 } 8974 func rewriteValuegeneric_OpSliceLen(v *Value, config *Config) bool { 8975 b := v.Block 8976 _ 
	= b
	// Remaining rules of rewriteValuegeneric_OpSliceLen (the function opens
	// in the previous chunk): the length of a freshly built slice is
	// whatever was passed as SliceMake's second argument.
	// match: (SliceLen (SliceMake _ (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	// match: (SliceLen (SliceMake _ (SliceLen x) _))
	// cond:
	// result: (SliceLen x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpSliceLen {
			break
		}
		x := v_0_1.Args[0]
		v.reset(OpSliceLen)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSlicePtr applies the generic rewrite rules for
// SlicePtr: extracting the pointer from a SliceMake whose pointer component
// is itself a SlicePtr just forwards that SlicePtr.
func rewriteValuegeneric_OpSlicePtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (SlicePtr (SliceMake (SlicePtr x) _ _))
	// cond:
	// result: (SlicePtr x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpSliceMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpSlicePtr {
			break
		}
		x := v_0_0.Args[0]
		v.reset(OpSlicePtr)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSqrt constant-folds Sqrt of a 64-bit float constant.
// i2f/f2i presumably reinterpret the AuxInt bit pattern as a float64 and
// back (helpers defined elsewhere in this package) — the fold itself is
// performed by math.Sqrt at compile time.
func rewriteValuegeneric_OpSqrt(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sqrt (Const64F [c]))
	// cond:
	// result: (Const64F [f2i(math.Sqrt(i2f(c)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(math.Sqrt(i2f(c)))
		return true
	}
	return false
}

// rewriteValuegeneric_OpStore applies the generic rewrite rules for Store,
// decomposing stores of StructMake values into per-field stores (rules
// continue in the next chunk).
func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// Storing an empty struct writes nothing: the Store reduces to its
	// incoming memory state.
	// match: (Store _ (StructMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpCopy)
		v.Type = mem.Type
// rewriteValuegeneric_OpStore rewrites an OpStore value: stores of small
// SSA-able struct values (StructMake0..StructMake4) are decomposed into a
// chain of per-field stores at the fields' offsets, and a store of a value
// freshly loaded from memory that cannot be SSA'd becomes a memory-to-memory
// Move. It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpStore(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Store _ (StructMake0) mem)
	// cond:
	// result: mem
	for {
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake0 {
			break
		}
		mem := v.Args[2]
		// Storing an empty struct is a no-op: the store reduces to its
		// incoming memory state.
		v.reset(OpCopy)
		v.Type = mem.Type
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake1 <t> f0) mem)
	// cond:
	// result: (Store [t.FieldType(0).Size()] dst f0 mem)
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake1 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(0).Size()
		v.AddArg(dst)
		v.AddArg(f0)
		v.AddArg(mem)
		return true
	}
	// match: (Store dst (StructMake2 <t> f0 f1) mem)
	// cond:
	// result: (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake2 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		mem := v.Args[2]
		// Fields are stored lowest-offset first; the chain is built with the
		// last field's store as v itself.
		v.reset(OpStore)
		v.AuxInt = t.FieldType(1).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v0.AuxInt = t.FieldOff(1)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f1)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(0).Size()
		v1.AddArg(dst)
		v1.AddArg(f0)
		v1.AddArg(mem)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake3 <t> f0 f1 f2) mem)
	// cond:
	// result: (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem)))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake3 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(2).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v0.AuxInt = t.FieldOff(2)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f2)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(1).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v2.AuxInt = t.FieldOff(1)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f1)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(0).Size()
		v3.AddArg(dst)
		v3.AddArg(f0)
		v3.AddArg(mem)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store dst (StructMake4 <t> f0 f1 f2 f3) mem)
	// cond:
	// result: (Store [t.FieldType(3).Size()] (OffPtr <t.FieldType(3).PtrTo()> [t.FieldOff(3)] dst) f3 (Store [t.FieldType(2).Size()] (OffPtr <t.FieldType(2).PtrTo()> [t.FieldOff(2)] dst) f2 (Store [t.FieldType(1).Size()] (OffPtr <t.FieldType(1).PtrTo()> [t.FieldOff(1)] dst) f1 (Store [t.FieldType(0).Size()] dst f0 mem))))
	for {
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpStructMake4 {
			break
		}
		t := v_1.Type
		f0 := v_1.Args[0]
		f1 := v_1.Args[1]
		f2 := v_1.Args[2]
		f3 := v_1.Args[3]
		mem := v.Args[2]
		v.reset(OpStore)
		v.AuxInt = t.FieldType(3).Size()
		v0 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(3).PtrTo())
		v0.AuxInt = t.FieldOff(3)
		v0.AddArg(dst)
		v.AddArg(v0)
		v.AddArg(f3)
		v1 := b.NewValue0(v.Line, OpStore, TypeMem)
		v1.AuxInt = t.FieldType(2).Size()
		v2 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(2).PtrTo())
		v2.AuxInt = t.FieldOff(2)
		v2.AddArg(dst)
		v1.AddArg(v2)
		v1.AddArg(f2)
		v3 := b.NewValue0(v.Line, OpStore, TypeMem)
		v3.AuxInt = t.FieldType(1).Size()
		v4 := b.NewValue0(v.Line, OpOffPtr, t.FieldType(1).PtrTo())
		v4.AuxInt = t.FieldOff(1)
		v4.AddArg(dst)
		v3.AddArg(v4)
		v3.AddArg(f1)
		v5 := b.NewValue0(v.Line, OpStore, TypeMem)
		v5.AuxInt = t.FieldType(0).Size()
		v5.AddArg(dst)
		v5.AddArg(f0)
		v5.AddArg(mem)
		v3.AddArg(v5)
		v1.AddArg(v3)
		v.AddArg(v1)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) mem)
	// cond: !config.fe.CanSSA(t)
	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src mem)
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		// The store must use the same memory state the load read from,
		// otherwise an intervening write could make the Move incorrect.
		if mem != v.Args[2] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		v.reset(OpMove)
		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
		v.AddArg(dst)
		v.AddArg(src)
		v.AddArg(mem)
		return true
	}
	// match: (Store [size] dst (Load <t> src mem) (VarDef {x} mem))
	// cond: !config.fe.CanSSA(t)
	// result: (Move [MakeSizeAndAlign(size, t.Alignment()).Int64()] dst src (VarDef {x} mem))
	for {
		size := v.AuxInt
		dst := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpLoad {
			break
		}
		t := v_1.Type
		src := v_1.Args[0]
		mem := v_1.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpVarDef {
			break
		}
		x := v_2.Aux
		if mem != v_2.Args[0] {
			break
		}
		if !(!config.fe.CanSSA(t)) {
			break
		}
		// Same as the previous rule, but tolerates (and re-creates) a VarDef
		// between the load's memory and the store.
		v.reset(OpMove)
		v.AuxInt = MakeSizeAndAlign(size, t.Alignment()).Int64()
		v.AddArg(dst)
		v.AddArg(src)
		v0 := b.NewValue0(v.Line, OpVarDef, TypeMem)
		v0.Aux = x
		v0.AddArg(mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpStringLen folds the length of a constructed string:
// (StringLen (StringMake _ len)) is just len when len is a constant.
// It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpStringLen(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringLen (StringMake _ (Const64 <t> [c])))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		c := v_0_1.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}

// rewriteValuegeneric_OpStringPtr folds the data pointer of a constructed
// string: (StringPtr (StringMake ptr _)) is just ptr when ptr is a constant.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpStringPtr(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StringPtr (StringMake (Const64 <t> [c]) _))
	// cond:
	// result: (Const64 <t> [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStringMake {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		c := v_0_0.AuxInt
		v.reset(OpConst64)
		v.Type = t
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValuegeneric_OpStructSelect simplifies field selection: selecting
// field [i] out of a StructMakeN yields the i-th constructor argument
// directly, and selecting a field of a non-SSA-able loaded struct becomes a
// narrower Load at the field's offset, placed in the load's block.
// It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpStructSelect(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (StructSelect (StructMake1 x))
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake1 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake2 x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake2 _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake2 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake3 x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake3 _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake3 _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake3 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [0] (StructMake4 x _ _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [1] (StructMake4 _ x _ _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 1 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [2] (StructMake4 _ _ x _))
	// cond:
	// result: x
	for {
		if v.AuxInt != 2 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[2]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [3] (StructMake4 _ _ _ x))
	// cond:
	// result: x
	for {
		if v.AuxInt != 3 {
			break
		}
		v_0 := v.Args[0]
		if v_0.Op != OpStructMake4 {
			break
		}
		x := v_0.Args[3]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (StructSelect [i] x:(Load <t> ptr mem))
	// cond: !config.fe.CanSSA(t)
	// result: @x.Block (Load <v.Type> (OffPtr <v.Type.PtrTo()> [t.FieldOff(int(i))] ptr) mem)
	for {
		i := v.AuxInt
		x := v.Args[0]
		if x.Op != OpLoad {
			break
		}
		t := x.Type
		ptr := x.Args[0]
		mem := x.Args[1]
		if !(!config.fe.CanSSA(t)) {
			break
		}
		// The replacement load is created in the original load's block
		// (the @x.Block directive), not in v's block.
		b = x.Block
		v0 := b.NewValue0(v.Line, OpLoad, v.Type)
		v.reset(OpCopy)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Line, OpOffPtr, v.Type.PtrTo())
		v1.AuxInt = t.FieldOff(int(i))
		v1.AddArg(ptr)
		v0.AddArg(v1)
		v0.AddArg(mem)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSub16 simplifies 16-bit subtraction: constant
// folding, canonicalizing x-c into (-c)+x, x-x => 0, and cancelling a
// shared addend of an Add16 operand. Results are truncated to 16 bits and
// sign-extended into the int64 AuxInt. It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpSub16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c - d))
		return true
	}
	// match: (Sub16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Add16 (Const16 <t> [int64(int16(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		// The x.Op != OpConst16 guard prevents an infinite rewrite loop
		// with the constant-folding rule above.
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = int64(int16(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub16 x x)
	// cond:
	// result: (Const16 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Sub16 (Add16 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub16 (Add16 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub32 simplifies 32-bit subtraction; same rule set
// as OpSub16 with results truncated to 32 bits. Reports whether v changed.
func rewriteValuegeneric_OpSub32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c - d))
		return true
	}
	// match: (Sub32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Add32 (Const32 <t> [int64(int32(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = int64(int32(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Sub32 (Add32 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub32 (Add32 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub32F simplifies float32 subtraction: constant
// folding (through the float32 <-> AuxInt bit-pattern helpers) and the
// identity x - 0 => x. It reports whether v was rewritten.
func rewriteValuegeneric_OpSub32F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub32F (Const32F [c]) (Const32F [d]))
	// cond:
	// result: (Const32F [f2i(float64(i2f32(c) - i2f32(d)))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = f2i(float64(i2f32(c) - i2f32(d)))
		return true
	}
	// match: (Sub32F x (Const32F [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		// AuxInt 0 is the bit pattern of +0.0; -0.0 is deliberately not
		// matched (x - (-0.0) differs for x == -0.0).
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpSub64 simplifies 64-bit subtraction; same rule set
// as the narrower Sub ops but without truncation (AuxInt is already 64-bit).
// It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpSub64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c-d])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c - d
		return true
	}
	// match: (Sub64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Add64 (Const64 <t> [-c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = -c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Sub64 (Add64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub64 (Add64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub64F simplifies float64 subtraction: constant
// folding via the i2f/f2i bit-pattern helpers and the identity x - 0 => x.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpSub64F(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub64F (Const64F [c]) (Const64F [d]))
	// cond:
	// result: (Const64F [f2i(i2f(c) - i2f(d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64F)
		v.AuxInt = f2i(i2f(c) - i2f(d))
		return true
	}
	// match: (Sub64F x (Const64F [0]))
	// cond:
	// result: x
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64F {
			break
		}
		// Only the +0.0 bit pattern qualifies; see OpSub32F.
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpSub8 simplifies 8-bit subtraction; same rule set as
// OpSub16 with results truncated to 8 bits. Reports whether v changed.
func rewriteValuegeneric_OpSub8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Sub8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c-d))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c - d))
		return true
	}
	// match: (Sub8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Add8 (Const8 <t> [int64(int8(-c))]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = int64(int8(-c))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Sub8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Sub8 (Add8 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Sub8 (Add8 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpTrunc16to8 simplifies 16->8 bit truncation:
// constant folding, and dropping an AND whose mask keeps all of the low
// 8 bits (the AND cannot change the truncated result).
// It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpTrunc16to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc16to8 (Const16 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc16to8 (And16 (Const16 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc16to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc16to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc32to16 simplifies 32->16 bit truncation;
// same shape as OpTrunc16to8 with a 16-bit mask.
func rewriteValuegeneric_OpTrunc32to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to16 (Const32 [c]))
	// cond:
	// result: (Const16 [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c))
		return true
	}
	// match: (Trunc32to16 (And32 (Const32 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc32to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFF == 0xFFFF) {
			break
		}
		v.reset(OpTrunc32to16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc32to8 simplifies 32->8 bit truncation;
// same shape as OpTrunc16to8.
func rewriteValuegeneric_OpTrunc32to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc32to8 (Const32 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc32to8 (And32 (Const32 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc32to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc32to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to16 simplifies 64->16 bit truncation;
// same shape as OpTrunc16to8 with a 16-bit mask.
func rewriteValuegeneric_OpTrunc64to16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to16 (Const64 [c]))
	// cond:
	// result: (Const16 [int64(int16(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c))
		return true
	}
	// match: (Trunc64to16 (And64 (Const64 [y]) x))
	// cond: y&0xFFFF == 0xFFFF
	// result: (Trunc64to16 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFF == 0xFFFF) {
			break
		}
		v.reset(OpTrunc64to16)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpTrunc64to32 simplifies 64->32 bit truncation;
// same shape as OpTrunc16to8 with a 32-bit mask.
func rewriteValuegeneric_OpTrunc64to32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to32 (Const64 [c]))
	// cond:
	// result: (Const32 [int64(int32(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c))
		return true
	}
	// match: (Trunc64to32 (And64 (Const64 [y]) x))
	// cond: y&0xFFFFFFFF == 0xFFFFFFFF
	// result: (Trunc64to32 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFFFFFFFF == 0xFFFFFFFF) {
			break
		}
		v.reset(OpTrunc64to32)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpTrunc64to8 simplifies 64->8 bit truncation;
// same shape as OpTrunc16to8. It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpTrunc64to8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Trunc64to8 (Const64 [c]))
	// cond:
	// result: (Const8 [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (Trunc64to8 (And64 (Const64 [y]) x))
	// cond: y&0xFF == 0xFF
	// result: (Trunc64to8 x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		y := v_0_0.AuxInt
		x := v_0.Args[1]
		if !(y&0xFF == 0xFF) {
			break
		}
		v.reset(OpTrunc64to8)
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor16 simplifies 16-bit XOR: constants are
// canonicalized to the first operand, x^x => 0, 0^x => x, and a shared
// operand of a nested Xor16 cancels. It reports whether v was rewritten.
func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor16 x (Const16 <t> [c]))
	// cond: x.Op != OpConst16
	// result: (Xor16 (Const16 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		// Guard prevents ping-ponging when both operands are constants.
		if !(x.Op != OpConst16) {
			break
		}
		v.reset(OpXor16)
		v0 := b.NewValue0(v.Line, OpConst16, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor16 x x)
	// cond:
	// result: (Const16 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (Xor16 (Const16 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor16 x (Xor16 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor16 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor16 x (Xor16 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor16 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor16 (Xor16 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor16 (Xor16 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor16 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpXor32 simplifies 32-bit XOR; same rule set as
// OpXor16. It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor32 x (Const32 <t> [c]))
	// cond: x.Op != OpConst32
	// result: (Xor32 (Const32 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst32) {
			break
		}
		v.reset(OpXor32)
		v0 := b.NewValue0(v.Line, OpConst32, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor32 x x)
	// cond:
	// result: (Const32 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (Xor32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor32 x (Xor32 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor32 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor32 x (Xor32 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor32 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor32 (Xor32 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor32 (Xor32 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor32 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}

// rewriteValuegeneric_OpXor64 simplifies 64-bit XOR; same rule set as
// OpXor16. It reports whether v was rewritten.
func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor64 x (Const64 <t> [c]))
	// cond: x.Op != OpConst64
	// result: (Xor64 (Const64 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst64) {
			break
		}
		v.reset(OpXor64)
		v0 := b.NewValue0(v.Line, OpConst64, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x x)
	// cond:
	// result: (Const64 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (Xor64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor64 x (Xor64 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 x (Xor64 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor64 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor64 (Xor64 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor64 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpXor8 simplifies 8-bit XOR; same rule set as
// OpXor16. It reports whether v was rewritten.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
	b := v.Block
	_ = b
	// match: (Xor8 x (Const8 <t> [c]))
	// cond: x.Op != OpConst8
	// result: (Xor8 (Const8 <t> [c]) x)
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		t := v_1.Type
		c := v_1.AuxInt
		if !(x.Op != OpConst8) {
			break
		}
		v.reset(OpXor8)
		v0 := b.NewValue0(v.Line, OpConst8, t)
		v0.AuxInt = c
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x x)
	// cond:
	// result: (Const8 [0])
	for {
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (Xor8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Xor8 x (Xor8 x y))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		y := v_1.Args[1]
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 x (Xor8 y x))
	// cond:
	// result: y
	for {
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpXor8 {
			break
		}
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) x)
	// cond:
	// result: y
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (Xor8 (Xor8 x y) y)
	// cond:
	// result: x
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpXor8 {
			break
		}
		x := v_0.Args[0]
		y := v_0.Args[1]
		if y != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteBlockgeneric applies generic rewrite rules to a control-flow
// block: a nil check of the g register degrades to a plain edge, a negated
// If condition swaps the successors, and a constant-bool If collapses to a
// First block. It reports whether b was changed.
// NOTE(review): generated from gen/generic.rules — fix rules there, not here.
func rewriteBlockgeneric(b *Block) bool {
	switch b.Kind {
	case BlockCheck:
		// match: (Check (NilCheck (GetG _) _) next)
		// cond:
		// result: (Plain nil next)
		for {
			v := b.Control
			if v.Op != OpNilCheck {
				break
			}
			v_0 := v.Args[0]
			if v_0.Op != OpGetG {
				break
			}
			next := b.Succs[0]
			b.Kind = BlockPlain
			b.SetControl(nil)
			_ = next // successor is unchanged; kept for generator uniformity
			return true
		}
	case BlockIf:
		// match: (If (Not cond) yes no)
		// cond:
		// result: (If cond no yes)
		for {
			v := b.Control
			if v.Op != OpNot {
				break
			}
			cond := v.Args[0]
			yes := b.Succs[0]
			no := b.Succs[1]
			b.Kind = BlockIf
			b.SetControl(cond)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 1
		// result: (First nil yes no)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 1) {
				break
			}
			b.Kind = BlockFirst
			b.SetControl(nil)
			_ = yes
			_ = no
			return true
		}
		// match: (If (ConstBool [c]) yes no)
		// cond: c == 0
		// result: (First nil no yes)
		for {
			v := b.Control
			if v.Op != OpConstBool {
				break
			}
			c := v.AuxInt
			yes := b.Succs[0]
			no := b.Succs[1]
			if !(c == 0) {
				break
			}
			// A false condition takes the "no" edge: swap so the First
			// block's taken successor comes first.
			b.Kind = BlockFirst
			b.SetControl(nil)
			b.swapSuccessors()
			_ = no
			_ = yes
			return true
		}
	}
	return false
}